# =============================================================================
# Created By : Mohsen Malmir
# Created Date: Fri Nov 09 8:10 PM EST 2018
# Purpose : this file implements the gui handling to interact with emulators
# =============================================================================
from AppKit import NSWorkspace, NSApplicationActivateIgnoringOtherApps
from Quartz import CGWindowListCopyWindowInfo, kCGWindowListOptionOnScreenOnly
from Quartz import kCGWindowListExcludeDesktopElements, kCGNullWindowID

# supported_emus and supported_games are parallel lists: index ii pairs an emulator with its game
supported_emus = ["OpenEmu"]
supported_games = ["Mortal Kombat 3"]
def activate_emu():
"""
This function scans all the open windows and returns a handle to the first known
and supported emulator-game pair.
Args:
None
    Returns:
        (rect, emu, game) on success, where rect is [x, y, width, height] of the game
        window in screen coordinates; None if no supported emulator-game pair is found.
    """
# get a list of all open windows
    # (the window-list options are bit flags; combine them with bitwise OR, not AND)
    windows = CGWindowListCopyWindowInfo(kCGWindowListOptionOnScreenOnly | kCGWindowListExcludeDesktopElements, kCGNullWindowID)
winname_list = [w.get("kCGWindowName", u"Unknown") for w in windows]
winrect_list = [w["kCGWindowBounds"] for w in windows]
# first find the Emulator
ws = NSWorkspace.sharedWorkspace()
runningApps = ws.runningApplications()
# the running processes are checked by their localized name, e.g. "OpenEmu"
ra_names = [ra.localizedName() for ra in runningApps]
for ii, emu in enumerate(supported_emus):
if emu in ra_names: # if a supported emu is found, check for corresponding games
            if supported_games[ii] in winname_list: # we found a supported game of the target emu
# activate the emu window
emu_idx = ra_names.index(emu)
runningApps[emu_idx].activateWithOptions_(NSApplicationActivateIgnoringOtherApps)
# get the window coordinates
idx = winname_list.index(supported_games[ii])
rect = winrect_list[idx]
rect = [rect.get("X"),rect.get("Y"),rect.get("Width"),rect.get("Height")]
rect = list(map(int,rect))
return rect, emu, supported_games[ii]
return None
if __name__ == "__main__":
print(activate_emu())
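# --- Hedged usage sketch (not part of the original module) ----------------------------
# Illustrates how the tuple returned by activate_emu() might be consumed; nothing beyond
# the function defined above is assumed.
result = activate_emu()
if result is None:
    print("No supported emulator-game pair is currently open.")
else:
    rect, emu, game = result
    x, y, width, height = rect
    print("{} is running {} in a {}x{} window at ({}, {})".format(emu, game, width, height, x, y))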
import requests
import csv
from bs4 import BeautifulSoup
reservoirs = [["LVQ"], ["HTH"], ["APN"], ["KNT"], ["SHA"]]
for reservoir in reservoirs:
storageURL = "https://cdec.water.ca.gov/dynamicapp/QueryMonthly?s=" + reservoir[0]
storagePage = requests.get(storageURL)
storageSoup = BeautifulSoup(storagePage.content, "html.parser")
storageRow = storageSoup.find(text="08/2021").parent.parent
reservoir.append(storageRow.findAll('td')[1].text.strip())
avgURL = "https://cdec.water.ca.gov/dynamicapp/profile?s=" + reservoir[0] + "&type=res"
avgPage = requests.get(avgURL)
avgSoup = BeautifulSoup(avgPage.content, "html.parser")
reservoir.append(avgSoup.find(text="August").parent.parent.parent.findAll('td')[1].text.strip())
####################
outfile = open("./water-data-all-august.csv", "wb")
writer = csv.writer(outfile)
writer.writerow(["Reservoir", "August storage", "August average"])
writer.writerows(reservoirs)
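# --- Hedged sketch (not part of the original script) ----------------------------------
# The "find the label text, walk up to its row, take the second <td>" pattern used above
# raises AttributeError when a month row is missing. A small wrapper can return an empty
# string instead. The inline HTML below is invented purely for demonstration.
from bs4 import BeautifulSoup

def cell_after_label(soup, label, levels_up=2):
    node = soup.find(text=label)
    if node is None:
        return ""
    row = node
    for _ in range(levels_up):
        row = row.parent
    cells = row.findAll("td")
    return cells[1].text.strip() if len(cells) > 1 else ""

demo = BeautifulSoup("<table><tr><td>08/2021</td><td>123,456</td></tr></table>", "html.parser")
print(cell_after_label(demo, "08/2021"))  # prints: 123,456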
# -*- coding: utf-8 -*-
"""
helpers
~~~~~~~
Implements various helper functions.
:copyright: (c) 2016 by Patrick Spencer.
:license: Apache 2.0, see LICENSE for more details.
"""
from datetime import datetime, timedelta
import calendar
def month_bounds(year, month):
"""
Returns a tuple of datetime objects (month_start,month_end) given a year and month.
Both params are strings because we want month to be a two digit month representation
and python doesn't handle leading zeros in integers as we want.
:param year: four digit year as a string e.g. "2016"
    :param month: two digit month as a string, e.g. "02" for February, "11" for November
"""
year = int(year)
month = int(month)
month_start = datetime.strptime('%s,%s,1' % (year, month),'%Y,%m,%d')
    # calendar.monthrange returns a (weekday, days) tuple, where weekday is the day of the
    # week the month starts on and days is the number of days in the month
days_in_month = calendar.monthrange(year,month)
month_end = month_start + timedelta(days=days_in_month[1]-1)
return (month_start, month_end)
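# --- Usage example (illustrative, not part of the original module) --------------------
# February 2016 is a leap year, so the bounds run from the 1st through the 29th.
if __name__ == '__main__':
    start, end = month_bounds("2016", "02")
    print(start)  # 2016-02-01 00:00:00
    print(end)    # 2016-02-29 00:00:00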
def findFirst(arr, l, h, x):
    # binary search over the sorted slice arr[l..h]; returns the index of some occurrence of x, or -1
if l > h:
return -1
mid = (l + h) // 2
if arr[mid] == x:
return mid
elif arr[mid] > x:
return findFirst(arr, l, mid - 1, x)
return findFirst(arr, mid + 1, h, x)
def indexes(arr, x):
    # return [first, last] indices of x in the sorted list arr (or [-1, -1] if absent):
    # binary-search for one occurrence, then scan outwards in both directions
n = len(arr)
ind = findFirst(arr, 0, n - 1, x)
if ind == -1:
return [-1, -1]
l = u = ind
for i in range(ind + 1, n):
if arr[i] == x:
u = i
else:
break
for i in range(ind - 1, -1, -1):
if arr[i] == x:
l = i
else:
break
return [l, u]
print(indexes([1, 2, 5, 5, 5, 5, 5, 12, 45, 67], 5))
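# --- Alternative sketch (not part of the original file) -------------------------------
# The same first/last lookup can be done in O(log n) overall with the standard library's
# bisect module instead of the linear scan used in indexes() above.
from bisect import bisect_left, bisect_right

def indexes_bisect(arr, x):
    lo = bisect_left(arr, x)
    if lo == len(arr) or arr[lo] != x:
        return [-1, -1]
    return [lo, bisect_right(arr, x) - 1]

print(indexes_bisect([1, 2, 5, 5, 5, 5, 5, 12, 45, 67], 5))  # [2, 6]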
from flask import Flask, render_template
from flask_ask import Ask, statement, question, session
import reverse_geocoder as rg
from geopy import distance
from geopy.geocoders import Nominatim
import requests
import time
'''
:::::::: ::::::::: ::: :::::::: :::::::::: ::: ::: ::: ::: ::: ::: :::
:+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+:
+:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+
+#++:++#++ +#++:++#+ +#++:++#++: +#+ +#++:++# +#+ +:+ +#+ +#++:++#++: +#+ +#++:++ +#+
+#+ +#+ +#+ +#+ +#+ +#+ +#+ +#+#+ +#+ +#+ +#+ +#+ +#+ +#+ +#+
#+# #+# #+# #+# #+# #+# #+# #+# #+#+# #+#+# #+# #+# #+# #+# #+#
######## ### ### ### ######## ########## ### ### ### ### ########## ### ### ###
'''
app = Flask(__name__)
ask = Ask(app, "/space_walk")
def find_ordinals(city, iss):
'''
Take tuple coordinates (lat, lon) for City and ISS and
find the cardinal direction of NE, SE, SW, NW
'''
if iss[0] - city[0] > 0:
a = 'North'
else:
a = 'South'
if iss[1] - city[1] > 0:
b = 'East'
else:
b = 'West'
return ''.join([a, b])
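# Quick illustrative check of find_ordinals (comment only, not part of the original):
#   find_ordinals(city=(40.7, -74.0), iss=(45.0, -70.0))  ->  'NorthEast'
# because the station is further north (45.0 > 40.7) and further east (-70.0 > -74.0).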
def where_is_the_iss_now():
iss_now_website = 'http://api.open-notify.org/iss-now.json'
webby = requests.get(iss_now_website)
data = webby.json()
if data['iss_position']:
longitude = data['iss_position'].get('longitude')
latitude = data['iss_position'].get('latitude')
results = rg.search((latitude, longitude), mode=1)
lat, lon, name, admin1, admin2, cc = results[0].values()
ordinal = find_ordinals(city=(float(lat), float(lon)), iss=(float(latitude), float(longitude)))
country_cc = requests.get(
'https://pkgstore.datahub.io/core/country-list/data_json/data/8c458f2d15d9f2119654b29ede6e45b8/data_json.json')
country_cc = country_cc.json()
iss_coordinates = (latitude, longitude)
k_nearest_coordinates = (lat, lon)
distance_miles = distance.distance(k_nearest_coordinates, iss_coordinates).miles
country_name = ''
for i in filter(lambda d: d.get('Code') == cc, country_cc):
country_name = i.get('Name')
location_text = ', '.join([name, admin1, country_name])
if distance_miles > 150:
answer = 'The International Space Station is {} miles {} off the coast of {}'.format(int(distance_miles), ordinal,
location_text)
else:
answer = 'the International Space Station is {} miles {} near {}'.format(int(distance_miles),ordinal, location_text)
return answer, latitude, longitude, distance_miles, ordinal, name, admin1, country_name
@app.route('/')
def homepage():
return ''
@ask.launch
def start_skill():
# welcome_message = 'Welcome to the Fleet Feet Journal! What is your name?'
welcome_message_reprompt = render_template('welcome_message_reprompt')
welcome_message = render_template('welcome_message')
return (question(welcome_message).reprompt(welcome_message_reprompt))
@ask.intent('YourLocation')
def pass_over(my_location):
geolocator = Nominatim(user_agent='my-application')
print(my_location)
location = geolocator.geocode(my_location,language='en-US')
try:
city = location.address.split(',')[0]
state = location.address.split(',')[2]
country = location.address.split(',')[-1]
location_name = ', '.join([city, state, country])
except IndexError:
location_name = location.address.split(',')[-1]
fly_over = requests.get(
'http://api.open-notify.org/iss-pass.json?lat={}&lon={}'.format(location.latitude, location.longitude))
fly_over = fly_over.json()
if fly_over['message'] == 'success':
rise = fly_over['response'][0]
answer = time.strftime('%A, %B %d, %Y at %I:%M %p GMT', time.localtime(rise.get('risetime')))
a = rise.get('risetime') # last epoch recorded
b = time.time() # current epoch time
c = a - b # returns seconds
hours = c // 3600 % 24
minutes = c // 60 % 60
minutes = int(minutes)
hours = int(hours)
if minutes == 1:
minorminutes = 'minute'
else: minorminutes = 'minutes'
if hours == 1:
hour_or_hours = 'hour'
else: hour_or_hours = 'hours'
if hours == 0:
time_til_rise = "{} {}".format(minutes, minorminutes)
else: time_til_rise = "{} {} and {} {}".format(hours, hour_or_hours, minutes, minorminutes)
    else:
        # open-notify did not return a successful pass prediction; bail out here so the
        # final return never references time_til_rise before it is assigned
        return statement('Sorry, I could not find the next flyover time for {}'.format(location_name))
    return statement('the next flyover for {} will begin in {} on {}'.format(location_name, time_til_rise, answer))
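# Worked example of the countdown arithmetic above (comment only, not part of the
# original): if the next rise time is 5430 seconds away, hours = 5430 // 3600 % 24 = 1
# and minutes = 5430 // 60 % 60 = 30, so the skill reads out "1 hour and 30 minutes".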
@ask.intent('WhereISS')
def share_location():
iss_location, latitude, longitude, distance_miles, ordinal, name, admin1, country_name= where_is_the_iss_now()
latitude, longitude, distance_miles = float(latitude), float(longitude), float(distance_miles)
return statement(iss_location).standard_card(
title="Location of the International Space Station",
text='Latitude {} and Longitude {},\n {} miles {} of {}, {} in {}'.format(round(latitude,2), round(longitude,2), round(distance_miles,0), ordinal, name, admin1, country_name))
@ask.intent('AMAZON.FallbackIntent')
def fallback():
to_continue = render_template('to_continue')
return question('Sorry, I am not sure what you asked me...{}'.format(to_continue))
@ask.intent('AMAZON.NavigateHomeIntent')
def go_home():
return question('et - phone home')
@ask.intent('AMAZON.HelpIntent')
def help_me():
help_me_text = render_template('help')
return question(help_me_text)
@ask.intent('Credits')
def speak_credits():
credits_ = render_template('credits')
return statement(credits_)
@ask.intent('AMAZON.StopIntent')
def stop():
bye_text = render_template('bye')
return statement(bye_text)
@ask.intent('AMAZON.CancelIntent')
def cancel():
bye_text = render_template('bye')
return statement(bye_text)
@ask.session_ended
def session_ended():
return "{}", 200
if __name__ == '__main__':
app.run(debug=True)
"step-4": "<mask token>\n\n\ndef where_is_the_iss_now():\n iss_now_website = 'http://api.open-notify.org/iss-now.json'\n webby = requests.get(iss_now_website)\n data = webby.json()\n if data['iss_position']:\n longitude = data['iss_position'].get('longitude')\n latitude = data['iss_position'].get('latitude')\n results = rg.search((latitude, longitude), mode=1)\n lat, lon, name, admin1, admin2, cc = results[0].values()\n ordinal = find_ordinals(city=(float(lat), float(lon)), iss=(float(\n latitude), float(longitude)))\n country_cc = requests.get(\n 'https://pkgstore.datahub.io/core/country-list/data_json/data/8c458f2d15d9f2119654b29ede6e45b8/data_json.json'\n )\n country_cc = country_cc.json()\n iss_coordinates = latitude, longitude\n k_nearest_coordinates = lat, lon\n distance_miles = distance.distance(k_nearest_coordinates, iss_coordinates\n ).miles\n country_name = ''\n for i in filter(lambda d: d.get('Code') == cc, country_cc):\n country_name = i.get('Name')\n location_text = ', '.join([name, admin1, country_name])\n if distance_miles > 150:\n answer = (\n 'The International Space Station is {} miles {} off the coast of {}'\n .format(int(distance_miles), ordinal, location_text))\n else:\n answer = ('the International Space Station is {} miles {} near {}'.\n format(int(distance_miles), ordinal, location_text))\n return (answer, latitude, longitude, distance_miles, ordinal, name,\n admin1, country_name)\n\n\[email protected]('/')\ndef homepage():\n return ''\n\n\[email protected]\ndef start_skill():\n welcome_message_reprompt = render_template('welcome_message_reprompt')\n welcome_message = render_template('welcome_message')\n return question(welcome_message).reprompt(welcome_message_reprompt)\n\n\[email protected]('YourLocation')\ndef pass_over(my_location):\n geolocator = Nominatim(user_agent='my-application')\n print(my_location)\n location = geolocator.geocode(my_location, language='en-US')\n try:\n city = location.address.split(',')[0]\n state = location.address.split(',')[2]\n country = location.address.split(',')[-1]\n location_name = ', '.join([city, state, country])\n except IndexError:\n location_name = location.address.split(',')[-1]\n fly_over = requests.get(\n 'http://api.open-notify.org/iss-pass.json?lat={}&lon={}'.format(\n location.latitude, location.longitude))\n fly_over = fly_over.json()\n if fly_over['message'] == 'success':\n rise = fly_over['response'][0]\n answer = time.strftime('%A, %B %d, %Y at %I:%M %p GMT', time.\n localtime(rise.get('risetime')))\n a = rise.get('risetime')\n b = time.time()\n c = a - b\n hours = c // 3600 % 24\n minutes = c // 60 % 60\n minutes = int(minutes)\n hours = int(hours)\n if minutes == 1:\n minorminutes = 'minute'\n else:\n minorminutes = 'minutes'\n if hours == 1:\n hour_or_hours = 'hour'\n else:\n hour_or_hours = 'hours'\n if hours == 0:\n time_til_rise = '{} {}'.format(minutes, minorminutes)\n else:\n time_til_rise = '{} {} and {} {}'.format(hours, hour_or_hours,\n minutes, minorminutes)\n else:\n answer = 'failure'\n return statement('the next flyover for {} will begin in {} on {}'.\n format(location_name, time_til_rise, answer))\n\n\[email protected]('WhereISS')\ndef share_location():\n (iss_location, latitude, longitude, distance_miles, ordinal, name,\n admin1, country_name) = where_is_the_iss_now()\n latitude, longitude, distance_miles = float(latitude), float(longitude\n ), float(distance_miles)\n return statement(iss_location).standard_card(title=\n 'Location of the International Space Station', text=\n \"\"\"Latitude {} and 
Longitude {},\n {} miles {} of {}, {} in {}\"\"\".\n format(round(latitude, 2), round(longitude, 2), round(\n distance_miles, 0), ordinal, name, admin1, country_name))\n\n\[email protected]('AMAZON.FallbackIntent')\ndef fallback():\n to_continue = render_template('to_continue')\n return question('Sorry, I am not sure what you asked me...{}'.format(\n to_continue))\n\n\[email protected]('AMAZON.NavigateHomeIntent')\ndef go_home():\n return question('et - phone home')\n\n\[email protected]('AMAZON.HelpIntent')\ndef help_me():\n help_me_text = render_template('help')\n return question(help_me_text)\n\n\[email protected]('Credits')\ndef speak_credits():\n credits_ = render_template('credits')\n return statement(credits_)\n\n\[email protected]('AMAZON.StopIntent')\ndef stop():\n bye_text = render_template('bye')\n return statement(bye_text)\n\n\[email protected]('AMAZON.CancelIntent')\ndef cancel():\n bye_text = render_template('bye')\n return statement(bye_text)\n\n\[email protected]_ended\ndef session_ended():\n return '{}', 200\n\n\n<mask token>\n",
"step-5": "\nfrom flask import Flask, render_template\nfrom flask_ask import Ask, statement, question, session\nimport reverse_geocoder as rg\nfrom geopy import distance\nfrom geopy.geocoders import Nominatim\nimport requests\nimport time\n\n\n'''\n :::::::: ::::::::: ::: :::::::: :::::::::: ::: ::: ::: ::: ::: ::: ::: \n:+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: \n+:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ \n+#++:++#++ +#++:++#+ +#++:++#++: +#+ +#++:++# +#+ +:+ +#+ +#++:++#++: +#+ +#++:++ +#+ \n +#+ +#+ +#+ +#+ +#+ +#+ +#+ +#+#+ +#+ +#+ +#+ +#+ +#+ +#+ +#+ \n#+# #+# #+# #+# #+# #+# #+# #+# #+#+# #+#+# #+# #+# #+# #+# #+# \n ######## ### ### ### ######## ########## ### ### ### ### ########## ### ### ### \n'''\n\n\napp = Flask(__name__)\n\nask = Ask(app, \"/space_walk\")\n\n\ndef find_ordinals(city, iss):\n ''' \n Take tuple coordinates (lat, lon) for City and ISS and\n find the cardinal direction of NE, SE, SW, NW\n '''\n\n if iss[0] - city[0] > 0:\n a = 'North'\n else:\n a = 'South'\n\n if iss[1] - city[1] > 0:\n b = 'East'\n else:\n b = 'West'\n return ''.join([a, b])\n\n\ndef where_is_the_iss_now():\n iss_now_website = 'http://api.open-notify.org/iss-now.json'\n webby = requests.get(iss_now_website)\n data = webby.json()\n\n if data['iss_position']:\n longitude = data['iss_position'].get('longitude')\n latitude = data['iss_position'].get('latitude')\n\n results = rg.search((latitude, longitude), mode=1)\n\n lat, lon, name, admin1, admin2, cc = results[0].values()\n\n ordinal = find_ordinals(city=(float(lat), float(lon)), iss=(float(latitude), float(longitude)))\n\n country_cc = requests.get(\n 'https://pkgstore.datahub.io/core/country-list/data_json/data/8c458f2d15d9f2119654b29ede6e45b8/data_json.json')\n country_cc = country_cc.json()\n\n iss_coordinates = (latitude, longitude)\n k_nearest_coordinates = (lat, lon)\n distance_miles = distance.distance(k_nearest_coordinates, iss_coordinates).miles\n\n country_name = ''\n for i in filter(lambda d: d.get('Code') == cc, country_cc):\n country_name = i.get('Name')\n\n location_text = ', '.join([name, admin1, country_name])\n\n if distance_miles > 150:\n answer = 'The International Space Station is {} miles {} off the coast of {}'.format(int(distance_miles), ordinal,\n location_text)\n else:\n answer = 'the International Space Station is {} miles {} near {}'.format(int(distance_miles),ordinal, location_text)\n return answer, latitude, longitude, distance_miles, ordinal, name, admin1, country_name\n\n\[email protected]('/')\ndef homepage():\n return ''\n\n\[email protected]\ndef start_skill():\n # welcome_message = 'Welcome to the Fleet Feet Journal! 
What is your name?'\n\n welcome_message_reprompt = render_template('welcome_message_reprompt')\n welcome_message = render_template('welcome_message')\n return (question(welcome_message).reprompt(welcome_message_reprompt))\n\n\[email protected]('YourLocation')\ndef pass_over(my_location):\n\n geolocator = Nominatim(user_agent='my-application')\n print(my_location)\n location = geolocator.geocode(my_location,language='en-US')\n try:\n city = location.address.split(',')[0]\n state = location.address.split(',')[2]\n country = location.address.split(',')[-1]\n location_name = ', '.join([city, state, country])\n except IndexError:\n location_name = location.address.split(',')[-1]\n\n fly_over = requests.get(\n 'http://api.open-notify.org/iss-pass.json?lat={}&lon={}'.format(location.latitude, location.longitude))\n fly_over = fly_over.json()\n\n if fly_over['message'] == 'success':\n rise = fly_over['response'][0]\n answer = time.strftime('%A, %B %d, %Y at %I:%M %p GMT', time.localtime(rise.get('risetime')))\n a = rise.get('risetime') # last epoch recorded\n b = time.time() # current epoch time\n c = a - b # returns seconds\n hours = c // 3600 % 24\n minutes = c // 60 % 60\n minutes = int(minutes)\n hours = int(hours)\n\n if minutes == 1:\n minorminutes = 'minute'\n else: minorminutes = 'minutes'\n\n if hours == 1:\n hour_or_hours = 'hour'\n else: hour_or_hours = 'hours'\n\n if hours == 0:\n time_til_rise = \"{} {}\".format(minutes, minorminutes)\n else: time_til_rise = \"{} {} and {} {}\".format(hours, hour_or_hours, minutes, minorminutes)\n\n else:\n answer = \"failure\"\n return statement('the next flyover for {} will begin in {} on {}'.format(location_name, time_til_rise, answer))\n\n\[email protected]('WhereISS')\ndef share_location():\n\n iss_location, latitude, longitude, distance_miles, ordinal, name, admin1, country_name= where_is_the_iss_now()\n latitude, longitude, distance_miles = float(latitude), float(longitude), float(distance_miles)\n return statement(iss_location).standard_card(\n title=\"Location of the International Space Station\",\n text='Latitude {} and Longitude {},\\n {} miles {} of {}, {} in {}'.format(round(latitude,2), round(longitude,2), round(distance_miles,0), ordinal, name, admin1, country_name))\n\n\[email protected]('AMAZON.FallbackIntent')\ndef fallback():\n to_continue = render_template('to_continue')\n return question('Sorry, I am not sure what you asked me...{}'.format(to_continue))\n\n\[email protected]('AMAZON.NavigateHomeIntent')\ndef go_home():\n return question('et - phone home')\n\n\[email protected]('AMAZON.HelpIntent')\ndef help_me():\n help_me_text = render_template('help')\n return question(help_me_text)\n\n\[email protected]('Credits')\ndef speak_credits():\n credits_ = render_template('credits')\n return statement(credits_)\n\n\[email protected]('AMAZON.StopIntent')\ndef stop():\n bye_text = render_template('bye')\n return statement(bye_text)\n\n\[email protected]('AMAZON.CancelIntent')\ndef cancel():\n bye_text = render_template('bye')\n return statement(bye_text)\n\n\[email protected]_ended\ndef session_ended():\n return \"{}\", 200\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n\n\n\n\n\n",
"step-ids": [
8,
9,
11,
12,
17
]
}
|
[
8,
9,
11,
12,
17
] |
"""
Create a list of words and with it, create a new dictionary
in which the key is the word and the value is the same word
reversed.
"""
word_list = ['Tree','Apple','Snake','flowers']
word_dict = {word:word[::-1] for word in word_list}
print(word_dict)
#Output: {'Tree': 'eerT', 'Apple': 'elppA', 'Snake': 'ekanS', 'flowers': 'srewolf'}
"""
Let's try this one again:
Using the range function, create a sequence of numbers
from 1 to 100, and using the comprehension to return only
those that are multiples of 2.
"""
use_range = range(1,101)
multiple_list = [i for i in use_range if i%2==0]
print(multiple_list)
"""
[[1, 2, 3, 4], [5, 6, 7, 8]]
Use the list above and create nested comprehensions so that
the final value is a new list like the following
[[2, 4, 6, 8], [10, 12, 14, 16]], i.e. each number multiplied by 2
"""
list_above = [[1, 2, 3, 4], [5, 6, 7, 8]]
final_list = [[bottom*2 for bottom in top] for top in list_above]
print(final_list)
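#Output: [[2, 4, 6, 8], [10, 12, 14, 16]]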
|
normal
|
{
"blob_id": "5ac489a2d30155bb92767184ad546247817e28ea",
"index": 1478,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(word_dict)\n<mask token>\nprint(multiple_list)\n<mask token>\nprint(final_list)\n",
"step-3": "<mask token>\nword_list = ['Tree', 'Apple', 'Snake', 'flowers']\nword_dict = {word: word[::-1] for word in word_list}\nprint(word_dict)\n<mask token>\nuse_range = range(1, 101)\nmultiple_list = [i for i in use_range if i % 2 == 0]\nprint(multiple_list)\n<mask token>\nlist_above = [[1, 2, 3, 4], [5, 6, 7, 8]]\nfinal_list = [[(bottom * 2) for bottom in top] for top in list_above]\nprint(final_list)\n",
"step-4": "\"\"\"\nCreate a list of words and with it, create a new dictionary\nin which the key is the word and the value is the same word\nreversed.\n\"\"\"\n\nword_list = ['Tree','Apple','Snake','flowers']\nword_dict = {word:word[::-1] for word in word_list}\nprint(word_dict)\n#Output: {'Tree': 'eerT', 'Apple': 'elppA', 'Snake': 'ekanS', 'flowers': 'srewolf'}\n\n\"\"\"\nLet's try this one again:\nUsing the range function, create a sequence of numbers\nfrom 1 to 100, and using the comprehension to return only\nthose that are multiplies of 2.\n\"\"\"\nuse_range = range(1,101)\nmultiple_list = [i for i in use_range if i%2==0]\nprint(multiple_list)\n\n\n\"\"\"\n[[1, 2, 3, 4], [5, 6, 7, 8]]\nUse the list above and create nested comprehensions so that\nthe final value is a new list like the following\n[[2, 4, 6, 8], [10, 12, 14, 16]] The number multiplied by 2\n\"\"\"\nlist_above = [[1, 2, 3, 4], [5, 6, 7, 8]]\n\nfinal_list = [[bottom*2 for bottom in top] for top in list_above]\nprint(final_list)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import traceback
from functools import partial
import json
import logging
from collections import defaultdict
from itertools import cycle as CycleIter
from datetime import datetime, date, timedelta
from decimal import Decimal
import random
from copy import deepcopy
from math import ceil
import boto3
import bottle
from peewee import fn, SQL, JOIN_LEFT_OUTER, JOIN_INNER, R
from mongoengine import Q
from deprecated.sphinx import deprecated
from bomber.api import (
AccountService,
MessageService,
AuditService,
BillService,
Dashboard,
GoldenEye,
Hyperloop,
Message,
Scout)
from bomber.constant_mapping import (
AutoCallMessageCycle,
ApplicationStatus,
RealRelationship,
BomberCallSwitch,
CallActionCommit,
ApplicantSource,
ApplicationType,
EscalationType,
ApprovalStatus,
AutoListStatus,
AutoCallResult,
BeforeInBomber,
PriorityStatus,
InboxCategory,
OldLoanStatus,
BombingResult,
ContactStatus,
SpecialBomber,
PartnerStatus,
Relationship,
ConnectType,
SubRelation,
PhoneStatus,
ContactType,
SmsChannel,
ContainOut,
FIRSTLOAN,
AppName,
RipeInd,
Cycle,
ContactsUseful,
DisAppStatus,
BomberStatus,
PartnerType)
from bomber.controllers.templates import cs_number_conf
from bomber.controllers.report_calculation.collection_tool import (
average_call_duration_team
)
from bomber.controllers.report_calculation.collection_agent import get_agent
from bomber.db import db, readonly_db
from bomber.models_readonly import (
DispatchAppHistoryR,
AutoCallActionsR,
ConnectHistoryR,
ApplicationR,
CallActionsR,
OverdueBillR,
BomberR)
from bomber.models import (
ManualCallListStatus,
RepaymentReportInto,
OldLoanApplication,
DispatchAppHistory,
CompanyContactType,
FamilyContactType,
ReportCollection,
RepaymentReport,
AutoCallActions,
DispatchAppLogs,
ConnectHistory,
BombingHistory,
ManualCallList,
AutoIVRActions,
SummaryBomber,
SummaryDaily,
IVRCallStatus,
BomberOverdue,
AutoCallList,
AutoIVRStatus,
SystemConfig,
RepaymentLog,
IVRActionLog,
TotalContact,
Application,
CallActions,
DispatchApp,
OverdueBill,
Escalation,
BomberPtp,
WorkerLog,
BomberLog,
CycleList,
Template,
Transfer,
Summary2,
AutoIVR,
Partner,
Contact,
CallLog,
Summary,
Bomber,
Inbox,
Role,
SCI,
)
from bomber.sns import MessageAction, send_to_default_q
from bomber.utils import (
get_cycle_by_overdue_days,
str_no_utc_datetime,
no_utc_datetime,
gender_ktpnum,
list_to_dict,
birth_dt_ktp,
number_strip,
utc_datetime,
OperatedDict,
average_gen,
time_logger,
idg,
)
from bomber.report_work import get_every_cycle_report
app = bottle.default_app()
client = boto3.client('sqs')
# Registry of exposed actions: key - action name, value - list of handler functions
actions = {}
def action(msg_action):
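    """Register the decorated function as a handler for the given MessageAction."""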
action_name = msg_action.value.lower()
if action_name not in actions:
actions[action_name] = []
def wrapper(func):
actions[action_name].append(func)
return func
return wrapper
@action(MessageAction.BOMBER_HEALTH_CHECK)
def health_check(payload, msg_id):
pass
def dpd1_classify(item, lst):
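    """Bucket a DPD1 application id into lst under the key '<APP_NAME>_<su>_DPD1' when that key is a known BeforeInBomber rule."""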
app_name = str(item['app_name']).upper()
key = '{}_{}_DPD1'.format(app_name, str(item['su']))
if key in BeforeInBomber.keys():
lst[key].append(item['id'])
return lst
def dpd1_process(lst):
"""已废弃的方法"""
if not lst:
return
for key, l in lst.items():
rule = getattr(BeforeInBomber, key).value
query = (AutoIVRActions
.select(fn.DISTINCT(AutoIVRActions.loanid))
.where(AutoIVRActions.loanid.in_(l),
AutoIVRActions.group.in_(rule.get('group')),
AutoIVRActions.callstate
.in_(IVRCallStatus.call_success())))
success_set = {i.loanid for i in query}
failed_list = list(set(l) - success_set)
post_params = {
'$and': rule.get('$and'),
'app_list': failed_list
}
resp = Hyperloop().post("/bomber/score/verify", json=post_params)
if not resp.ok:
logging.error(
'hyperloop score verification failed: %s, %s',
str(resp.status_code),
str(resp.text)
)
logging.error('hyperloop score verification failed: %s',
str(post_params))
continue
logging.debug('hyperloop score verification success: %s', resp.content)
resp_json = resp.json()
        # DPD1 applications enter bomber ahead of schedule
app_list = resp_json['data']
if not app_list:
continue
for item in app_list:
            # A/B test: one third of users are pulled into collection early
if random.randint(0, 5) == 1:
send_to_default_q(
MessageAction.APPLICATION_BOMBER,
{'id': int(item)}
)
# auto_ivr: the automatic outbound-call (IVR) system
@action(MessageAction.GET_IVR)
def get_ivr(payload, msg_id):
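    """Page through bills due over the next few days and load them into the AutoIVR queue, grouped by app name, overdue count and days until due."""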
logging.warning('start get_ivr')
sys_config = (SystemConfig.select()
.where(SystemConfig.key == 'DPD1-3_INTO_IVR')
.first())
    # Collect all the loan ids
now = date.today()
    # Overdue users are expected to stop using IVR and enter collection directly, so the time window was changed to no longer fetch overdue data
if sys_config and sys_config.value:
start = now - timedelta(days=3)
else:
start = now
end = now + timedelta(days=4)
    # TODO: use redis
item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()
if not item:
        # Clear the IVR data before starting
AutoIVR.delete().execute()
current_page = 0
elif item.current_page >= item.total_page:
return
else:
current_page = item.current_page
    # Overdue grouping: app name + overdue count + days overdue
auto_ivr = {
'DanaCepat01': 1,
'DanaCepat00': 2,
'DanaCepat0PDP1': 3,
'PinjamUang01': 4,
'PinjamUang00': 5,
'PinjamUang0PDP1': 6,
'KtaKilat01': 7,
'KtaKilat00': 8,
'KtaKilat0PDP1': 9,
'DanaCepat11': 10,
'DanaCepat10': 11,
'DanaCepat1PDP1': 12,
'PinjamUang11': 13,
'PinjamUang10': 14,
'PinjamUang1PDP1': 15,
'KtaKilat11': 16,
'KtaKilat10': 17,
'KtaKilat1PDP1': 18,
'DanaCepat0PDP2': 19,
'DanaCepat0PDP3': 20,
'DanaCepat03': 21,
'PinjamUang0PDP2': 22,
'PinjamUang0PDP3': 23,
'PinjamUang03': 24,
'KtaKilat0PDP2': 25,
'KtaKilat0PDP3': 26,
'KtaKilat03': 27,
'DanaCepat1PDP2': 28,
'DanaCepat1PDP3': 29,
'PinjamUang1PDP2': 30,
'PinjamUang1PDP3': 31,
'KtaKilat1PDP2': 32,
'KtaKilat1PDP3': 33,
'DanaCepat13': 36,
'PinjamUang13': 37,
'KtaKilat13': 38,
'DanaCepat12': 39,
'PinjamUang12': 40,
'KtaKilat12': 41,
'DanaCepat02': 42,
'PinjamUang02': 43,
'KtaKilat02': 44,
'IKIDana01': 100,
'IKIDana00': 101,
'IKIDana0PDP1': 102,
'IKIDana11': 103,
'IKIDana10': 104,
'IKIDana1PDP1': 105,
'IKIDana0PDP2': 106,
'IKIDana0PDP3': 107,
'IKIDana03': 108,
'IKIDana1PDP2': 109,
'IKIDana1PDP3': 110,
'IKIDana13': 111,
'IKIDana12': 112,
'IKIDana02': 113,
}
current_page += 1
with db.atomic() as transaction:
while True:
bill_service = BillService()
            # Fetch bills whose bill_sub.origin_due_at falls between today and four days from now
ivr_action = bill_service.ivr_pages(
page=current_page,
page_size=500,
start_time=utc_datetime(str(start)),
end_time=utc_datetime(str(end)))
result = ivr_action['result']
page_size = int(ivr_action.get('page_size', 0))
total_page = int(ivr_action.get('total_page', 0))
insert_args = []
for a in result:
due_at = no_utc_datetime(a['due_at'])
days = (due_at.date() - now).days
if days == 2:
continue
if days > 0:
time = str(days)
else:
                    # The time window above already limits the request, so records overdue by two days are never returned
time = str(days).replace('-', 'PDP')
                # su: how many times this user has been overdue
key = a['app_name'] + str(a['su']) + time
group = auto_ivr.get(key)
user_id = a['user_id']
try:
user_resp = (AccountService()
.get_user(path_params={'user_id': user_id}))
if str(user_resp['mobile_no']) == str(a['user_mobile_no']):
numbers = a['user_mobile_no']
else:
numbers = (a['user_mobile_no'] +
',' + user_resp.get('mobile_no'))
except:
logging.error('Request Account Service Error.')
numbers = a['user_mobile_no']
insert_args.append({
'application_id': a['id'],
'numbers': numbers,
'group': group,
'user_id': user_id})
AutoIVR.insert_many(insert_args).execute()
if current_page == 1:
IVRActionLog.create(total_page=total_page,
proc_date=now,
page_size=page_size,
current_page=current_page)
                # For some unknown reason, create() does not return the newly created object here
item = IVRActionLog.get(IVRActionLog.proc_date == now)
else:
item.current_page = current_page
item.page_size = page_size
item.total_page = total_page
item.save()
transaction.commit()
current_page += 1
if current_page > int(total_page):
break
# try:
# ivr_t2_test()
# except Exception as e:
# logging.error("ivr_test_error:%s"%str(e))
if sys_config and sys_config.value:
try:
classfiy_dpd_ptp_apps()
except Exception as e:
logging.error("dpd1-3_test_error:%s"%str(e))
# Test code for sending T-2 applications into IVR
def ivr_t2_test():
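    """Sample a configurable proportion of T-2 IVR records per group for testing and mark the rest as SUCCESS so they are skipped."""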
t2_groups = [39, 40, 41, 42, 43, 44]
ivr_test_proportion = 0.2
sys_config = (SystemConfig.select()
.where(SystemConfig.key == 'IVR_TEST_PROPORTION')
.first())
if sys_config and sys_config.value:
ivr_test_proportion = float(sys_config.value)
    # Get all T-2 applications
t2_ivrs = (AutoIVR.select()
.where(AutoIVR.group << t2_groups,
AutoIVR.status == AutoIVRStatus.AVAILABLE.value))
t2_dict = defaultdict(list)
    # Take a fixed proportion of applications from each group
for ivr in t2_ivrs:
t2_dict[ivr.group].append(ivr.id)
test_ivr_ids = []
for group, ivr_ids in t2_dict.items():
number = ceil(len(ivr_ids) * ivr_test_proportion)
test_ivr_ids += ivr_ids[:number]
if not test_ivr_ids:
return
    # Update the IVR status
q = (AutoIVR.update(status=AutoIVRStatus.SUCCESS.value)
.where(AutoIVR.group << t2_groups,
AutoIVR.id.not_in(test_ivr_ids))
.execute())
# Filter out applications that already have a PTP in bomber
def classfiy_dpd_ptp_apps():
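    """Drop DPD1-3 IVR records that already have a promise-to-pay in bomber, then keep only a configurable test proportion per group."""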
dpd_group = AutoIVR.dpd_groups()
dpd1_3_ivr_pro = 0.2
sys_config = (SystemConfig.select()
.where(SystemConfig.key == 'DPD1-3_IVR_TEST')
.first())
if sys_config and sys_config.value:
dpd1_3_ivr_pro = float(sys_config.value)
    # Find applications that already have an active promise-to-pay (PTP)
apps = (ApplicationR.select(ApplicationR.external_id)
.where(ApplicationR.overdue_days < 4,
ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.promised_date >= date.today(),
ApplicationR.promised_date.is_null(False)))
apps_ids = [a.external_id for a in apps]
    # Remove PTP applications from the IVR queue
if apps_ids:
d = (AutoIVR.delete()
.where(AutoIVR.application_id.in_(apps_ids),
AutoIVR.group.in_(dpd_group))
.execute())
    # All DPD1-3 applications
ivrs = (AutoIVR.select().where(AutoIVR.group.in_(dpd_group)))
ivrs_dict = defaultdict(list)
for ivr in ivrs:
ivrs_dict[ivr.group].append(ivr.id)
test_ivrs = []
for group, ivr_ids in ivrs_dict.items():
number = ceil(len(ivr_ids) * dpd1_3_ivr_pro)
test_ivrs += ivr_ids[:number]
if not test_ivrs:
return
    # Update the status of the records not selected for the test
q = (AutoIVR.update(status=AutoIVRStatus.SUCCESS.value)
.where(AutoIVR.group.in_(dpd_group),
AutoIVR.id.not_in(test_ivrs))
.execute())
# Special handling for the APP merge
@action(MessageAction.APP_MERGE)
@deprecated(version='1.0', reason='This function will be removed soon')
def app_merge(payload, msg_id):
    # Pull applications whose DPD has not yet reached 4 into bomber early
sql = """
select *
from (
select a.id as id
from dashboard.application as a
inner join repayment.bill2 as b on b.external_id = a.id
where not exists (
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.app = 'DanaCepat'
and a.is_first_loan = 1
and a.apply_at < '2018-08-23 20:50:00'
and b.overdue_days between 1 and 3
and b.status != 2) result
where not exists (
select 1
from bomber.application as a
where a.cycle = 1
and a.status = 4
and a.id = result.id
)
"""
cursor = readonly_db.get_cursor()
cursor.execute(sql)
new_data = cursor.fetchall()
cursor.close()
if new_data:
bomber = [103, 104]
for d in new_data:
app_id = {'id': d[0]}
application_overdue(app_id, None)
            # Randomly assign the newly added applications to the matching collectors
(Application
.update(status=ApplicationStatus.AB_TEST.value,
latest_bomber=random.choice(bomber),
ptp_bomber=None
)
.where(Application.id == d[0])
).execute()
logging.warning('add new app success')
    # If the user has logged in again and has no PTP, remove the application from manual collection
ptp = date.today() - timedelta(days=1)
del_sql = """
select a.id
from bomber.application as a
where exists(
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.cycle = 1
and a.status = 4
and (a.promised_date is null or a.promised_date < "%s")
""" % ptp
cursor = readonly_db.get_cursor()
cursor.execute(del_sql)
del_date = cursor.fetchall()
cursor.close()
    if not del_date:
return
ids = list()
for d in del_date:
ids.append(d[0])
(Application
.update(status=ApplicationStatus.UNCLAIMED.value,
latest_bomber=None)
.where(Application.id << ids)).execute()
@action(MessageAction.APPLICATION_BOMBER)
def application_overdue(payload, msg_id):
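    """Create the bomber collection record and sub overdue bill for an application that has just become overdue, then import its contacts."""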
application_id = payload['id']
sub_bill_id = payload['bill_sub_id']
local_app = (Application.select()
.where(Application.external_id == application_id)
.order_by(Application.finished_at)
.first())
    # Single-period loan whose collection record already exists
if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:
logging.info('application %s overdue, already exists', application_id)
add_contact(local_app)
return
    # For installment loans, check whether the sub-bill already exists
if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:
overdue_bill = (OverdueBillR.select()
.where(OverdueBillR.sub_bill_id == sub_bill_id,
OverdueBillR.external_id == application_id))
if overdue_bill.exists():
logging.info(
'application %s,sub_bill_id %s overdue, already exists' %
(application_id, sub_bill_id))
return
try:
sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
sub_bill = sub_bill[0]
except Exception:
logging.error('application %s overdue, get sub_bill info failed:'
'Request To repayment Error', application_id)
return
if sub_bill['status'] == 2:
logging.error('application %s overdue, but bills already cleared',
application_id)
return
overdue_days = sub_bill.get('overdue_days', 0)
if overdue_days == 0:
logging.info('application {} no overdue'
.format(str(application_id)))
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
logging.error('get application %s failed: Request to GoldenEye.',
application_id)
return
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
apply_history = Dashboard().get('/users/%s/apply-history' % user_id)
if not apply_history.ok:
logging.error('get user %s apply history failed: Request '
'to Dashboard Failed.', user_id)
return
history = apply_history.json().get('data')
loan_success_times = len([1 for i in history
if i['status'] in [80, 90, 100, 70] and
i['id'] != gold_app['id']])
id = application_id
type = ApplicationType.CASH_LOAN.value
bill_id = sub_bill.get("bill_id")
amount = sub_bill.get("amount")
amount_net = sub_bill.get('amount_net')
interest_rate = sub_bill.get('interest_rate')
overdue_days = sub_bill.get('overdue_days')
origin_due_at = sub_bill.get('origin_due_at')
sub_overdue_bill = {
"collection_id": id,
"bill_id": bill_id,
"sub_bill_id": sub_bill_id,
"periods": sub_bill.get("periods"),
"overdue_days": overdue_days,
"origin_due_at": origin_due_at,
"amount": amount,
"amount_net": amount_net,
"interest_rate": interest_rate,
"external_id": application_id
}
    # Generate the id according to the collection record type
if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:
if local_app and local_app.status != ApplicationStatus.REPAID.value:
sub_overdue_bill["collection_id"] = local_app.id
local_app.amount += amount
local_app.amount_net += amount_net
local_app.save()
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info(
"application %s,sub_bill_id:%s overdue created" %
(application_id, sub_bill_id))
return
else:
id = idg()
type = ApplicationType.CASH_LOAN_STAGING.value
sub_overdue_bill["collection_id"] = id
ptp_info = BombingHistory.filter(BombingHistory.application == id).first()
promised_amount = ptp_info and ptp_info.promised_amount
promised_date = ptp_info and ptp_info.promised_date
application = Application.create(
id=id,
user_id=gold_app['user_id'],
user_mobile_no=gold_app['user_mobile_no'],
user_name=gold_app['id_name'],
app=gold_app['app'],
device_no=gold_app['device_no'],
contact=json.dumps(gold_app.get('contact')),
apply_at=gold_app.get('apply_date'),
id_ektp=gold_app.get('id_ektp'),
birth_date=birth_dt_ktp(gold_app.get('id_ektp')),
gender=gender_ktpnum(gold_app.get('id_ektp')),
profile_province=(gold_app.get('profile_province') or {}).get('name'),
profile_city=(gold_app.get('profile_city') or {}).get('name'),
profile_district=(gold_app.get('profile_district') or {}).get('name'),
profile_residence_time=gold_app.get('profile_residence_time'),
profile_residence_type=gold_app.get('profile_residence_type'),
profile_address=gold_app.get('profile_address'),
profile_education=gold_app.get('profile_education'),
profile_college=(gold_app.get('profile_college') or {}).get('name'),
job_name=gold_app.get('job_name'),
job_tel=gold_app.get('job_tel'),
job_bpjs=gold_app.get('job_bpjs'),
job_user_email=gold_app.get('job_user_email'),
job_type=gold_app.get('job_type'),
job_industry=gold_app.get('job_industry'),
job_department=gold_app.get('job_department'),
job_province=(gold_app.get('job_province') or {}).get('name'),
job_city=(gold_app.get('job_city') or {}).get('name'),
job_district=(gold_app.get('job_district') or {}).get('name'),
job_address=gold_app.get('job_address'),
amount=amount,
amount_net=amount_net,
interest_rate=interest_rate,
# late_fee_rate=bill.get('late_fee_rate'),
# late_fee_initial=late_fee_initial,
# late_fee=late_fee,
# interest=interest,
term=gold_app.get('term'),
origin_due_at=origin_due_at,
# due_at=bill.get('due_at'),
overdue_days=overdue_days,
repay_at=sub_bill.get('repay_at'),
# principal_paid=principal_paid,
# late_fee_paid=late_fee_paid,
# repaid=repaid,
# unpaid=unpaid,
loan_success_times=loan_success_times,
arrived_at=datetime.now(),
follow_up_date=datetime.now(),
promised_amount=promised_amount,
promised_date=promised_date,
external_id=application_id,
type=type,
bill_id=bill_id,
dpd1_entry=datetime.now()
)
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('overdue application %s created', application_id)
# new overdue application equals to 'escalate from 0 to 1'
Escalation.create(
application=id,
type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value,
current_cycle=0,
escalate_to=1,
)
add_contact(application)
def add_contact(application):
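    """Gather phone numbers for the application from apply info, EC contacts, SMS contacts, call records and external services, and store them as Contact rows."""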
logging.info('start add contact for application: %s', application.id)
    # Add contact information
contacts = Contact.filter(
Contact.user_id == application.user_id,
)
existing_numbers = {contact.number for contact in contacts}
insert_contacts = list()
mon_insert_contact = {}
# applicant
user_mobile_no = number_strip(application.user_mobile_no)
if user_mobile_no and user_mobile_no not in existing_numbers:
insert_contacts.append({
'user_id': application.user_id,
'name': application.user_name,
'number': user_mobile_no,
'relationship': Relationship.APPLICANT.value,
'source': 'apply info',
'real_relationship': Relationship.APPLICANT.value
})
existing_numbers.add(number_strip(application.user_mobile_no))
extra_phone = GoldenEye().get(
'/users/%s/extra-phone' % application.user_id
)
if not extra_phone.ok:
extra_phone = []
logging.error('get user %s extra contacts failed',
application.user_id)
else:
extra_phone = extra_phone.json()['data']
if extra_phone:
for i in extra_phone:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({
'user_id': application.user_id,
'name': application.user_name,
'number': number,
'relationship': Relationship.APPLICANT.value,
'source': 'extra phone',
'real_relationship': Relationship.APPLICANT.value
})
key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
# family
# ec contact
ec_contact = []
contact = json.loads(application.contact or '[]')
for i in contact:
if (number_strip(i['mobile_no']) not in existing_numbers and
number_strip(i['mobile_no'])):
ec_contact.append({
'user_id': application.user_id,
'name': i['name'],
'number': number_strip(i['mobile_no']),
'relationship': Relationship.FAMILY.value,
'sub_relation': SubRelation.EC.value,
'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value
})
key = (user_mobile_no,
number_strip(i['mobile_no']),
ContactType.F_EC.value)
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['mobile_no']))
if i['type'] != 1:
continue
if (number_strip(i['tel_no']) not in existing_numbers and
number_strip(i['tel_no'])):
ec_contact.append({
'user_id': application.user_id,
'name': i['name'],
'number': number_strip(i['tel_no']),
'relationship': Relationship.FAMILY.value,
'sub_relation': SubRelation.EC.value,
'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value
})
key = (user_mobile_no,
number_strip(i['tel_no']),
ContactType.F_EC.value)
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['tel_no']))
if ec_contact:
Contact.insert_many(ec_contact).execute()
# company
if all((application.job_tel,
number_strip(application.job_tel),
number_strip(application.job_tel) not in existing_numbers)):
insert_contacts.append({
'user_id': application.user_id,
'name': None,
'number': number_strip(application.job_tel),
'relationship': Relationship.COMPANY.value,
'source': 'basic info job_tel',
'real_relationship': Relationship.COMPANY.value
})
key = (user_mobile_no,
number_strip(application.job_tel),
ContactType.C_BASIC_INFO_JOB_TEL.value)
mon_insert_contact[key] = 1, 0, None
existing_numbers.add(number_strip(application.job_tel))
# suggested
sms_contacts = GoldenEye().get(
'/applications/%s/sms-contacts' % application.external_id
)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id)
else:
sms_contacts = sms_contacts.json()['data']
if sms_contacts:
for i in sms_contacts:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.SUGGESTED.value,
'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value
})
key = (user_mobile_no,
number,
ContactType.S_SMS_CONTACTS.value)
mon_insert_contact[key] = 1, 0, i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
cf = GoldenEye().get(
'/applications/%s/call/frequency' % application.external_id
)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
    # The structure differs, so rebuild the insert list
insert_contacts = []
fm = GoldenEye().get(
'/applications/%s/contact/family-member' % application.external_id
)
if not fm.ok:
family = []
logging.error('get application %s family-member info error',
application.external_id)
else:
family = fm.json()['data']
if family:
for i in family:
if not (i.get('number')):
logging.info('family member %s' % str(i))
continue
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
logging.info('family members: %s' % str(i))
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.FAMILY.value,
'source': FamilyContactType.CALLEC.value,
'total_count': i.get('total_count', 1),
'total_duration': i.get('total_duration', 0),
'real_relationship': Relationship.FAMILY.value
})
key = user_mobile_no, number, ContactType.F_CALL_EC.value
mon_insert_contact[key] = (i.get('total_count', 1),
i.get('total_duration', 0),
i['name'][:128])
existing_numbers.add(number)
mon_update_contact = {}
if call_frequency:
with db.atomic():
count = 1
for i in call_frequency:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
                    (Contact
                     .update(total_count=i['total_count'],
                             total_duration=i['total_duration'])
                     .where(Contact.number == number,
                            Contact.user_id == application.user_id)
                     ).execute()
key = user_mobile_no, number
mon_update_contact[key] = (i['total_count'],
i['total_duration'])
continue
                # Mark the five most frequently called numbers as family members
if count < 6:
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.FAMILY.value,
'total_count': i['total_count'],
'total_duration': i['total_duration'],
'source': FamilyContactType.CALLTOP5.value,
'real_relationship': Relationship.FAMILY.value
})
count += 1
key = user_mobile_no, number, ContactType.F_CALL_TOP5.value
mon_insert_contact[key] = (i['total_count'],
i['total_duration'],
i['name'][:128])
else:
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'],
'total_duration': i['total_duration'],
'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value
})
key = (user_mobile_no,
number,
ContactType.S_CALL_FREQUENCY.value)
mon_insert_contact[key] = (i['total_count'],
i['total_duration'],
i['name'][:128])
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
    # Add numbers from credit verification to the applicant's own contacts
next_apply_list = (AccountService().add_contact(application.user_id))
for next_apply in next_apply_list:
number = number_strip(str(next_apply))[:64]
if number and number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=application.user_name,
number=number,
relationship=Relationship.SUGGESTED.value,
source='online profile phone',
real_relationship=Relationship.SUGGESTED.value
)
key = (user_mobile_no,
number,
ContactType.S_ONLINE_PROFILE_PHONE.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
    # Add the other number of a dual-SIM phone to the applicant's own queue
next_applicant = GoldenEye().get(
'/bomber/%s/dual_contact' % application.user_id
)
if not next_applicant.ok:
next_applicant = []
logging.error('get user %s dual_contact contacts failed'
% application.user_id)
else:
next_applicant = next_applicant.json()['data']
if next_applicant:
for i in next_applicant:
number = number_strip(str(i))[:64]
if number and number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=application.user_name,
number=number,
relationship=Relationship.APPLICANT.value,
source='apply info',
real_relationship=Relationship.APPLICANT.value
)
key = user_mobile_no, number, ContactType.A_APPLY_INFO.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
# add new contact
    # Add every number registered under the same KTP to the applicant's own contacts
numbers = []
try:
numbers = (AccountService()
.ktp_number(path_params={'user_id': application.user_id}))
except Exception as e:
logging.info('request ktp numbers failed %s' % str(e))
for n in numbers:
number = number_strip(str(n))[:64]
if number and number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=application.user_name,
number=number,
relationship=Relationship.APPLICANT.value,
source='ktp number',
real_relationship=Relationship.APPLICANT.value
)
key = (user_mobile_no,
number,
ContactType.A_KTP_NUMBER.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success'
% application.user_id)
    # Mark contacts flagged is_family as EC
try:
ecs = GoldenEye().get(
'/applications/%s/contact/ec' % application.external_id
)
except Exception as e:
logging.info('request ec-member error: %s' % str(e))
try:
if not ecs.ok:
ec = []
logging.info('get application %s ec-member info error',
application.external_id)
else:
ec = ecs.json()['data']
if ec:
for e in ec:
number = number_strip(e['numbers'])[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=e['name'][:128],
number=number,
relationship=Relationship.FAMILY.value,
source=FamilyContactType.CONTACTEC.value,
real_relationship=Relationship.FAMILY.value
)
key = (user_mobile_no,
number,
ContactType.F_CONTACT_EC.value)
mon_insert_contact[key] = 1, 0, e['name'][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add ec_member error:%s' % str(e))
    # Mark contacts flagged is_me as the applicant's own numbers
try:
mn = GoldenEye().get(
'/applications/%s/contact/my_number' % application.external_id
)
except Exception as e:
logging.info('request my_number error: %s' % str(e))
try:
if not mn.ok:
my = []
logging.info('get application %s my_number info error',
application.external_id)
else:
my = mn.json()['data']
if my:
for m in my:
number = number_strip(m)[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=my[m][:128],
number=number,
relationship=Relationship.SUGGESTED.value,
source='my number',
real_relationship=Relationship.SUGGESTED.value
)
key = user_mobile_no, number, ContactType.S_MY_NUMBER.value
mon_insert_contact[key] = 1, 0, my[m][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add my_member error:%s' % str(e))
    # Get the company phone numbers
try:
cn = GoldenEye().get(
'/applications/%s/contact/company-number' % application.external_id
)
except Exception as e:
logging.info('request company-number error: %s' % str(e))
try:
if not cn.ok:
cn = []
logging.info('get application %s company_number info error',
application.external_id)
else:
cn = cn.json()['data']
if cn:
for c in cn:
number = c
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=cn[c][:128],
number=number,
relationship=Relationship.COMPANY.value,
source='company',
real_relationship=Relationship.COMPANY.value
)
key = user_mobile_no, number, ContactType.C_COMPANY.value
mon_insert_contact[key] = 1, 0, cn[c][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add company_member error:%s' % str(e))
    # Get the SIM contacts the user logged in with on other devices and add them to the applicant's numbers
try:
ol = (AccountService()
.other_login_contact(userId=application.user_id))
except Exception as e:
logging.error('request other_login error: %s' % e)
ol = {}
try:
for o in ol:
number = number_strip(o)
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=ol[o][:128],
number=number,
relationship=Relationship.SUGGESTED.value,
source='other_login',
real_relationship=Relationship.SUGGESTED.value
)
key = (user_mobile_no,
number,
ContactType.S_OTHER_LOGIN.value)
mon_insert_contact[key] = 1, 0, ol[o][:128]
except Exception as e:
logging.error('add other_login number error:%s' % e)
logging.info('add contact for application %s finished', application.id)
if mon_insert_contact or mon_update_contact:
send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON,
{
'user_mobile_no': user_mobile_no,
'insert_contact': str(mon_insert_contact),
'update_contact': str(mon_update_contact),
'user_id': application.user_id,
'name': application.user_name
})
@action(MessageAction.IMPORT_CONTACT_TO_MON)
def import_contact_to_mon(payload, msg_id):
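    """Write the collected contact relations into the TotalContact mongo collection, then trigger deduplication and a reverse import."""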
user_mobile_no = payload['user_mobile_no']
insert_contact = eval(payload['insert_contact'])
update_contact = eval(payload['update_contact'])
user_id = payload['user_id']
name = payload['name']
if not (insert_contact or update_contact or user_mobile_no):
logging.error("Invalid params")
drop_duplicated_contact({'numbers': [user_mobile_no]}, None)
send_to_default_q(MessageAction.CONTACT_FROM_TOTAL, {
'number': user_mobile_no,
'user_id': user_id
})
return
contacts = TotalContact.objects(src_number=user_mobile_no, is_calc=False)
insert_list = []
for c in contacts:
key = (user_mobile_no, c.dest_number, c.source)
if key in insert_contact:
insert_contact.pop(key)
for (sn, dn, s), (tc, td, na) in insert_contact.items():
insert_list.append({
'src_number': sn,
'src_name': name,
'dest_number': dn,
'dest_name': na,
'source': s,
'total_count': tc,
'total_duration': td
})
if insert_list:
insert_count = len((TotalContact
.objects
.insert([TotalContact(**dct)
for dct in insert_list])))
logging.info("insert success %s", insert_count)
update_count = 0
for (sn, dn), (tc, td) in update_contact.items():
result = (TotalContact
.objects(src_number=sn, dest_number=dn, is_calc=False)
.update(total_count=tc, total_duration=td))
if result:
update_count += 1
logging.info("update success %s", update_count)
drop_duplicated_contact({'numbers': [user_mobile_no]}, None)
send_to_default_q(MessageAction.CONTACT_FROM_TOTAL, {
'number': user_mobile_no,
'user_id': user_id
})
@action(MessageAction.DROP_DUPLICATED_CONTACT)
def drop_duplicated_contact(payload, msg_id):
"""
    When deduplicating by total_count and total_duration, compare total_count first, then total_duration
:param payload:
:param msg_id:
:return:
"""
numbers = payload.get('numbers', [])
if not numbers:
logging.error("no numbers should drop")
query = (TotalContact
.objects(Q(src_number__in=numbers) | Q(dest_number__in=numbers)))
contact_list = defaultdict(list)
delete_list = []
insert_list = []
for c in query:
if c.src_number == c.dest_number:
delete_list.append(c.id)
key = c.src_number, c.dest_number, c.source
contact_list[key].append({
'id': c.id,
'src_number': c.src_number,
'dest_number': c.dest_number,
'total_count': c.total_count,
'total_duration': c.total_duration,
'is_calc': c.is_calc,
'source': c.source,
'src_name': c.src_name,
'dest_name': c.dest_name
})
contact_list2 = deepcopy(contact_list)
for key, info in contact_list.items():
_info = sorted(info,
key=lambda x: (not x['is_calc'],
x['total_count'],
x['total_duration']),
reverse=True)
rs = _info[0]
if not rs['is_calc']:
contact_list2[(key[1], key[0], key[2])].append({
'src_number': rs['dest_number'],
'dest_number': rs['src_number'],
'total_count': rs['total_count'],
'total_duration': rs['total_duration'],
'is_calc': True,
'source': rs['source'],
'id': '',
'src_name': rs['dest_name'],
'dest_name': rs['src_name']
})
delete_ids = [i['id'] for i in _info[1:] if i['id']]
delete_list.extend(delete_ids)
for key, info in contact_list2.items():
_info = sorted(info,
key=lambda x: (not x['is_calc'],
x['total_count'],
x['total_duration']),
reverse=True)
rs = _info[0]
        # The first pass already processed every record that was not reversed
if not rs['is_calc']:
continue
if not rs['id']:
rs.pop('id')
insert_list.append(rs)
delete_ids = [i['id'] for i in _info[1:] if i['id']]
delete_list.extend(delete_ids)
if delete_list:
delete_count = TotalContact.objects(id__in=delete_list).delete()
logging.info("numbers %s: delete success %s", numbers, delete_count)
if insert_list:
insert_count = len((TotalContact
.objects
.insert([TotalContact(**dct)
for dct in insert_list])))
logging.info("numbers %s: insert success %s", numbers, insert_count)
def get_contact_from_mongo(number):
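    """Return the contact relations stored in TotalContact for the given source number, mapped to bomber relationship values."""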
if not number:
return []
query = (TotalContact
.objects(src_number=number,
source__in=TotalContact.available())
.order_by('source'))
lst = []
for c in query:
relation = TotalContact.relationship(c.source)
if relation == -1:
continue
source = TotalContact.str_source(c.source)
if not source:
continue
lst.append({
'related_number': c.dest_number,
'source': source,
'is_calc': c.is_calc,
'total_count': c.total_count,
'total_duration': c.total_duration,
'relation': relation,
'name': c.dest_name
})
return lst
@action(MessageAction.CONTACT_FROM_TOTAL)
def contact_from_total(payload, msg_id):
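    """Backfill Contact rows for a user from the aggregated TotalContact data, skipping numbers that already exist."""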
number = payload.get('number')
user_id = payload.get('user_id')
if not (number and user_id):
logging.error("Invalid params")
return
result = get_contact_from_mongo(number)
if not result:
logging.error("contact from mongo is none")
return
contacts = Contact.filter(Contact.user_id == user_id)
existing_numbers = {contact.number for contact in contacts}
contact_list = []
for c in result:
number = number_strip(c['related_number'])
if number in existing_numbers:
continue
contact_list.append({
'user_id': user_id,
'name': c['name'],
'number': number,
'relationship': c['relation'],
'source': c['source'],
'total_duration': c['total_duration'],
'total_count': c['total_count'],
'real_relationship': c['relation']
})
existing_numbers.add(number)
if contact_list:
Contact.insert_many(contact_list).execute()
@action(MessageAction.BILL_REVOKE)
def bill_revoke(payload, msg_id):
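    """Handle a repayment reversal: reopen the collection record and its sub-bill and invalidate the matching repayment log."""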
application_id = payload['external_id']
if 'bill_sub_id' not in payload:
bill_revoke_old(application_id)
return
    # Sub-bill id
sub_bill_id = payload['bill_sub_id']
    # Unique repayment identifier on the Java side
partner_bill_id = payload['partner_bill_id']
application = (Application
.filter(Application.external_id == application_id).first())
if application.type == ApplicationType.CASH_LOAN_STAGING.value:
        # Look up the collection record id via the sub-bill
application = (Application.select(Application)
.join(OverdueBill,JOIN_LEFT_OUTER,
on = Application.id == OverdueBill.collection_id)
.where(OverdueBill.external_id == application_id,
OverdueBill.sub_bill_id == sub_bill_id)
.first())
if not application:
logging.info('application %s paid, not found application',
application_id)
return
try:
bill = BillService().sub_bill_list(bill_sub_ids = [sub_bill_id])
bill = bill[0]
except Exception:
logging.error('application %s overdue, get bill info failed: '
'Request To Repayment Error', application_id)
raise RuntimeError('Get repayment bills failed. {}'
.format(str(application.id)))
if bill.get('overdue_days') > 0 and bill.get('status') != 2:
Application.update(
status=ApplicationStatus.UNCLAIMED.value
).where(Application.id == application.id).execute()
        # Fetch the sub-bill
overdue_bill = (OverdueBill
.filter(OverdueBill.external_id == application_id,
OverdueBill.sub_bill_id == sub_bill_id)
.first())
if not overdue_bill:
logging.info("not find overdue_bill,sub_bill_id:%s,appid:%s" %
(sub_bill_id, application_id))
return
if overdue_bill.status == ApplicationStatus.REPAID.value:
overdue_bill.status = ApplicationStatus.UNCLAIMED.value
overdue_bill.finished_at = None
overdue_bill.save()
        # Invalidate the matching repayment record
RepaymentLog.update(
no_active = 1
).where(RepaymentLog.partner_bill_id == partner_bill_id,
RepaymentLog.overdue_bill_id == overdue_bill.id).execute()
# Message handling for legacy data
def bill_revoke_old(external_id):
application = (Application.select()
.where(Application.id == external_id)
.first())
if not application:
logging.info("not get application")
return
try:
bill = BillService().bill_dict(
application_id=external_id)
except Exception:
logging.error('application %s overdue, get bill info failed: '
'Request To Repayment Error', external_id)
return
if bill.get('overdue_days') >0 and bill.get("status") != 2:
q = (Application
.update(status=ApplicationStatus.UNCLAIMED.value,
repay_at=bill.get('repay_at'))
.where(Application.id == external_id).execute())
p = (OverdueBill.update(status=ApplicationStatus.UNCLAIMED.value)
.where(OverdueBill.collection_id == external_id).execute())
return
def check_key_not_none(payload, keys):
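    """Return False (and log the missing key) if any of the given keys is absent from payload, otherwise True."""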
for key in keys:
if payload.get(key) is None:
logging.error('Missing args {}'.format(str(key)))
return False
return True
# Repayment
@action(MessageAction.BILL_PAID)
def bill_paid(payload, msg_id):
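    """Record a partial repayment: update repay_at, write a RepaymentLog, re-rank the contact call priority and notify the bomber."""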
# Don't use validator, it will throw exception
validate = check_key_not_none(payload,
['external_id', 'late_fee_part',
'principal_part', 'paid_at','bill_sub_id',
'partner_bill_id'])
if not validate:
logging.error('payload key not fully pass in.')
return
external_id = payload['external_id']
late_fee_part = Decimal(payload['late_fee_part'])
principal_part = Decimal(payload['principal_part'])
paid_at = payload['paid_at']
partner_bill_id = payload['partner_bill_id']
logging.debug('application %s paid principal part %s, paid late fee '
'part %s', external_id, principal_part, late_fee_part)
application = (Application
.filter(Application.external_id == external_id)
.order_by(-Application.created_at)
.first())
if not application:
logging.info('application %s paid, not found application',external_id)
return
    # Get the installment period
sub_bill_id = payload['bill_sub_id']
overdue_bill = (OverdueBillR.select()
.where(OverdueBillR.collection_id == application.id,
OverdueBillR.sub_bill_id == sub_bill_id)
.first())
if (application.type == ApplicationType.CASH_LOAN_STAGING.value
and not overdue_bill):
logging.info("bill sub not in bomber %s",sub_bill_id)
return
with db.atomic():
repay_at = str_no_utc_datetime(payload['latest_repay_at'])
Application.update(
repay_at=repay_at
).where(Application.id == application.id).execute()
        # After the predictive outbound-call system went live, everything is treated as is_bombed = True
RepaymentLog.create(
application=application.id,
is_bombed=True,
current_bomber=application.latest_bomber_id,
cycle=application.cycle,
principal_part=principal_part,
late_fee_part=late_fee_part,
repay_at=paid_at,
ptp_bomber=application.ptp_bomber,
latest_call=application.latest_call,
periods=overdue_bill.periods if overdue_bill else None,
overdue_bill_id=overdue_bill.id if overdue_bill else None,
partner_bill_id=partner_bill_id
)
        # Smart collection: re-rank the numbers used for collection calls
phone_status = PhoneStatus.CONNECTED.value
real_relationship = RealRelationship.user_values()
commit = CallActionCommit.NO.value
number = (CallActions.select(CallActions.number)
.where(CallActions.phone_status == phone_status,
CallActions.real_relationship << real_relationship,
CallActions.commit == commit,
CallActions.application == application.id)
.order_by(-CallActions.created_at)
.first())
if number:
(Contact.update(call_priority=PriorityStatus.REPAY.value)
.where(Contact.user_id == application.user_id,
Contact.call_priority == PriorityStatus.LAST.value)
).execute()
(Contact.update(call_priority=PriorityStatus.LAST.value)
.where(Contact.user_id == application.user_id,
Contact.number == number.number)
).execute()
if not application.latest_bomber_id:
return
Inbox.create(
title='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id),
content='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id),
receiver=(application.latest_bomber_id or
application.last_bomber_id),
category=InboxCategory.REPAID.value,
)
@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
"""已废弃"""
bill = payload['head_bill']
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_row = Application.update(
repay_at=repay_at,
).where(Application.id == bill['external_id']).execute()
logging.info('application %s bill relief done', bill['external_id'])
return updated_row
# Repayment fully completed
@action(MessageAction.BILL_CLEARED)
@action(MessageAction.BILL_CLEARED_BEFORE_CONFIRM)
def bill_cleared(payload, msg_id):
"""
    BILL_CLEARED_BEFORE_CONFIRM is only used inside the bomber system. When MST clears
    a bill, its status is first set to repaid so that it is no longer collected on.
"""
external_id = payload.get('external_id')
sub_bill_id = payload.get('bill_sub_id')
if not external_id:
logging.warning('payload has no external_id. {}'.format(str(payload)))
return
    # Once paid off, stop dialing this application via IVR
AutoIVR.update(
status=AutoIVRStatus.REPAID.value
).where(AutoIVR.application_id == external_id).execute()
try:
bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
bill = bill[0]
except Exception:
logging.error('get bill info failed: '
'Request To Repayment Error', external_id)
return
application = Application.filter(
Application.external_id == external_id,
Application.status << [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.BAD_DEBT.value,
ApplicationStatus.AB_TEST.value]
).first()
if not application:
logging.info('application %s repay clear, not found bomber record',
external_id)
return
with db.atomic():
        # Update the status of the sub-bill that was just cleared
sub_bill_update = (OverdueBill.update(
status = ApplicationStatus.REPAID.value,
finished_at = datetime.now())
.where(OverdueBill.collection_id == application.id,
OverdueBill.sub_bill_id == sub_bill_id)
.execute())
        # For installment applications, check whether any other sub-bill is still unpaid
overdue_bill = (OverdueBill.select()
.where(OverdueBill.collection_id == application.id,
OverdueBill.status != 2,
OverdueBill.sub_bill_id != sub_bill_id))
if overdue_bill.exists():
if application.latest_bomber_id:
Inbox.create(
title='application %s sub_bill_id %s cleared' % (
application.external_id, sub_bill_id),
content='application %s sub_bill_id %s cleared' % (
application.external_id, sub_bill_id),
receiver=application.latest_bomber_id,
category=InboxCategory.CLEARED.value,
)
return
        # Sync the repayment completion to the outsourcing partner
partner = DispatchApp.filter(DispatchApp.application == application.id)
if partner.exists():
DispatchApp.update(
status=DisAppStatus.ABNORMAL.value
).where(DispatchApp.application == application.id).execute()
        # Update the application status in the auto-dialer queue
AutoCallList.update(
status=AutoListStatus.REMOVED.value,
description='bill clear'
).where(AutoCallList.application == application.id).execute()
application.status = ApplicationStatus.REPAID.value
application.finished_at = datetime.now()
application.paid_at = datetime.now()
        # If overdue days is 0 the loan was never overdue and the application should not have entered bomber
if int(bill.get("overdue_days")) <= 0:
application.no_active = 1
(RepaymentLog.update(no_active=1)
.where(RepaymentLog.application == application.id)
.execute())
application.save()
bomber_id = application.latest_bomber_id
        # C1B applications are checked in after month-end clearing and must be checked out once paid; 2 is the default bomber_id
if (application.cycle in (Cycle.C1A.value,Cycle.C1B.value) and
not bomber_id):
bomber_id = application.cycle
if not bomber_id:
return
(DispatchAppHistory.update(
out_at=datetime.now()
).where(
DispatchAppHistory.application == application.id,
DispatchAppHistory.bomber_id == bomber_id)).execute()
if not application.latest_bomber_id:
return
item = (OldLoanApplication
.get_or_none(OldLoanApplication.status ==
OldLoanStatus.PROCESSING.value,
OldLoanApplication.application_id ==
application.id))
if item:
end_old_application(item, paid=True)
out_record(src_bomber_id=bomber_id,
application_ids=[item.application_id])
Inbox.create(
title='application %s cleared' % application.external_id,
content='application %s cleared' % application.external_id,
receiver=application.latest_bomber_id,
category=InboxCategory.CLEARED.value,
)
# Sync bill2
@action(MessageAction.OVERDUE_BILL_SYNC)
def overdue_bill_sync(payload, msg_id):
"""已废弃"""
bill2_list = payload
updated_count = 0
with db.atomic():
for bill in bill2_list:
principal = Decimal(bill['principal'])
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_count += Application.update(
amount=principal,
repay_at=repay_at,
).where(Application.id == bill['external_id']).execute()
logging.info('overdue sync done, updated count: %s', updated_count)
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
    # Update cases with more than 95 overdue days
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
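    # fn.DATEDIFF/fn.GREATEST compile to SQL roughly like
    # GREATEST(DATEDIFF(NOW(), origin_due_at), 0), so not-yet-due applications
    # get overdue_days = 0 instead of a negative value.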
query = (Application
.update(overdue_days=overdue_days)
.where(Application.status <<
[ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days > 95,
Application.type == ApplicationType.CASH_LOAN.value))
updated_rows_count = query.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count)
try:
calc_overdue_days_over_instalment()
except Exception as e:
logging.error("calc_overdue_days_over_instalment_error: %s"%str(e))
    # After overdue_days is recalculated, trigger automatic escalation
apps = Application.filter(
Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days > 95,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(
MessageAction.BOMBER_AUTOMATIC_ESCALATION,
{'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
# Recalculate overdue days for instalment cases already past 95 overdue days
def calc_overdue_days_over_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value]
for status in sub_bill_status_list:
        # Update overdue days
query = (OverdueBill.update(overdue_days=overdue_days)
.where(OverdueBill.status == status,
OverdueBill.overdue_days > 95))
updated_rows_count = query.execute()
logging.info("calc_overdue_days_over_instalment done,count:%s,status:%s" %
(updated_rows_count, status))
        # Fetch all sub-bill records
overdue_bills = (OverdueBill
.select(OverdueBill.collection_id,
OverdueBill.overdue_days)
.join(Application, JOIN_LEFT_OUTER,
on=OverdueBill.collection_id == Application.id)
.where(Application.status == status,
(Application.type ==
ApplicationType.CASH_LOAN_STAGING.value)))
        # Work out the overdue days to set on each instalment collection case
app_update = {}
for ob in overdue_bills:
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
        # Update overdue days on the collection cases
for aid, a_days in app_update.items():
q = (Application.update(overdue_days=a_days)
.where(Application.id == aid)
.execute())
logging.info("update instalment application done")
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)
def calc_overdue_days(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query_unclaimed = (Application
.update(overdue_days=overdue_days)
.where(Application.status ==
ApplicationStatus.UNCLAIMED.value,
Application.overdue_days <= 95,
(Application.type ==
ApplicationType.CASH_LOAN.value)))
updated_rows_count_unclaimed = query_unclaimed.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_unclaimed)
query_processing = (Application
.update(overdue_days=overdue_days)
.where(Application.status ==
ApplicationStatus.PROCESSING.value,
Application.overdue_days <= 95,
(Application.type ==
ApplicationType.CASH_LOAN.value)))
updated_rows_count_processing = query_processing.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_processing)
query_test = (Application
.update(overdue_days=overdue_days)
.where(Application.status ==
ApplicationStatus.AB_TEST.value,
Application.overdue_days <= 95,
(Application.type ==
ApplicationType.CASH_LOAN.value)))
updated_rows_count_test = query_test.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_test)
    # Calculate overdue days for instalment bills
calc_overdue_days_instalment()
    # After overdue_days is recalculated, trigger automatic escalation
apps = Application.select(Application.id).where(
Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days <= 95,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(
MessageAction.BOMBER_AUTOMATIC_ESCALATION,
{'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
    # After overdue_days is updated, set C1A_entry for cases with overdue_days == 4
Application.update(
C1A_entry=datetime.now()
).where(
Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days == 4
).execute()
# Calculate overdue days for instalment applications
def calc_overdue_days_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value]
    # Timestamp of the first day of the current month
today_now_time = datetime.now()
month_first_day = today_now_time.replace(day=1,
hour=1,
minute=30,
second=0,
microsecond=0)
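    # month_first_day is 01:30 on the 1st of the current month; instalment
    # periods repaid before that point are skipped below when recomputing the
    # collection case's overdue_days.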
for status in sub_bill_status_list:
        # Update overdue days
query = (OverdueBill.update(overdue_days = overdue_days)
.where(OverdueBill.status == status,
OverdueBill.overdue_days <= 95))
updated_rows_count = query.execute()
logging.info("calc_overdue_days_instalment done,count:%s,status:%s" %
(updated_rows_count, status))
        # Fetch all sub-bill records
overdue_bills = (OverdueBill
.select(OverdueBill.status,
OverdueBill.created_at,
OverdueBill.collection_id,
OverdueBill.overdue_days)
.join(Application, JOIN_LEFT_OUTER,
on=OverdueBill.collection_id == Application.id)
.where(Application.status == status,
(Application.type ==
ApplicationType.CASH_LOAN_STAGING.value)))
        # Work out the overdue days to set on each instalment collection case
app_update = {}
for ob in overdue_bills:
            # Skip periods that were already repaid before the current month
if (ob.status == ApplicationStatus.REPAID.value and
ob.created_at < month_first_day):
continue
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id],ob.overdue_days)
app_update[ob.collection_id] = ob_days
        # Update overdue days on the collection cases
for aid,a_days in app_update.items():
q = (Application.update(overdue_days = a_days)
.where(Application.id == aid)
.execute())
logging.info("update instalment application done")
@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)
def automatic_escalation(payload, msg_id):
app_ids = payload.get('application_list', [])
if not app_ids:
return
    # Filter out applications that are already repaid
apps = (Application.select()
.where(Application.id.in_(app_ids),
Application.status != ApplicationStatus.REPAID.value))
for a in apps:
new_cycle = application_entry_different_calculations(a)
if a.overdue_days < 90:
logging.info(
"automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}".format(
a.id, new_cycle, a.cycle, a.overdue_days))
if new_cycle > a.cycle:
with db.atomic():
if (a.latest_bomber_id or
a.cycle in (Cycle.C1A.value, Cycle.C1B.value)):
bomber_id = (a.latest_bomber_id
if a.latest_bomber_id else a.cycle)
(DispatchAppHistory.update(
out_at=datetime.now(),
out_overdue_days=a.overdue_days,
).where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == bomber_id
)).execute()
Escalation.create(
application=a.id,
type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value,
current_cycle=a.cycle,
escalate_to=new_cycle,
current_bomber_id=a.latest_bomber,
)
                # If the case is outsourced, mark it abnormal in dispatch_app on escalation
dis_app_update = (DispatchApp
.update(status = DisAppStatus.ABNORMAL.value)
.where(DispatchApp.application == a.id))
dis_app_update.execute()
a.cycle = new_cycle
a.last_bomber = a.latest_bomber
a.status = ApplicationStatus.UNCLAIMED.value
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
                # Reset the call counter after escalation
a.called_times = 0
if new_cycle == Cycle.C1B.value:
a.C1B_entry = datetime.now()
elif new_cycle == Cycle.C2.value:
a.C2_entry = datetime.now()
elif new_cycle == Cycle.C3.value:
a.C3_entry = datetime.now()
a.save()
logging.info('automatic escalation done')
# Shift the C1B entry point to 10 overdue days for part of the cases
def application_entry_different_calculations(app):
conf = {
1: [1, 10],
2: [11, 30],
3: [31, 60],
4: [61, 90],
5: [91, 999999],
}
for new_cycle,scopes in conf.items():
if scopes[0] <= app.overdue_days <= scopes[1]:
return new_cycle
return app.cycle
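# Illustration of the mapping above (not executed anywhere):
#   overdue_days = 25 -> falls in [11, 30]     -> returns 2 (C1B)
#   overdue_days = 95 -> falls in [91, 999999] -> returns 5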
@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
"""已废弃"""
employees = Bomber.select(Bomber, Role).join(Role)
summary = {
i.id: {
'cycle': i.role.cycle,
'claimed': 0,
'completed': 0,
'cleared': 0,
'escalated': 0,
'transferred': 0,
'promised': 0,
'amount_recovered': Decimal(0),
'calls_made': 0,
'calls_connected': 0,
'sms_sent': 0,
}
for i in employees
}
    # Runs daily at 02:15 and summarizes the previous day
now_date = date.today()
cal_date = now_date - timedelta(days=1)
    # Applications claimed that day
claimed = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('claimed'))
.where(fn.DATE(Application.claimed_at) == cal_date,
Application.status <<
[ApplicationStatus.PROCESSING.value,
ApplicationStatus.REPAID.value],
Application.latest_bomber.is_null(False))
.group_by(Application.latest_bomber))
    # Applications fully repaid that day
cleared = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('cleared'))
.where(fn.DATE(Application.finished_at) == cal_date,
Application.status == ApplicationStatus.REPAID.value,
Application.latest_bomber.is_null(False))
.group_by(Application.latest_bomber))
    # Applications worked (bombed) that day
completed = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('completed'))
.where(Application.latest_bombing_time.is_null(False),
fn.DATE(Application.latest_bombing_time) == cal_date,
Application.latest_bomber.is_null(False))
.group_by(Application.latest_bomber))
    # Cases escalated to the next cycle that day, by the bomber who held them
escalated = (Escalation
.select(Escalation.current_bomber,
fn.COUNT(Escalation.id).alias('escalated'))
.where(fn.DATE(Escalation.created_at) == cal_date,
Escalation.type == EscalationType.AUTOMATIC.value,
Escalation.current_bomber.is_null(False),
Escalation.status == ApprovalStatus.APPROVED.value)
.group_by(Escalation.current_bomber))
    # Cases transferred away from each operator that day
transferred = (Transfer
.select(Transfer.operator,
fn.COUNT(Transfer.id).alias('transferred'))
.where(fn.DATE(Transfer.reviewed_at) == cal_date,
Transfer.status == ApprovalStatus.APPROVED.value)
.group_by(Transfer.operator))
    # PTP (promise-to-pay) records that made progress that day
promised = (
BombingHistory
.select(BombingHistory.bomber,
fn.COUNT(BombingHistory.id).alias('promised'))
.where(fn.DATE(BombingHistory.created_at) == cal_date,
BombingHistory.result == BombingResult.HAS_PROGRESS.value)
.group_by(BombingHistory.bomber)
)
    # Amount recovered that day
amount_recovered = (RepaymentLog
.select(RepaymentLog.current_bomber,
fn.SUM(RepaymentLog.principal_part)
.alias('principal_part'),
fn.SUM(RepaymentLog.late_fee_part)
.alias('late_fee_part'))
.where(fn.DATE(RepaymentLog.repay_at) == cal_date,
RepaymentLog.is_bombed == True,
RepaymentLog.current_bomber.is_null(False))
.group_by(RepaymentLog.current_bomber))
    # The CallLog table is deprecated
calls_made = (CallLog
.select(CallLog.user_id,
fn.COUNT(CallLog.record_id).alias('calls_made'))
.where(fn.DATE(CallLog.time_start) == cal_date,
CallLog.system_type == '1')
.group_by(CallLog.user_id))
    # The CallLog table is deprecated
calls_connected = (CallLog
.select(CallLog.user_id,
fn.COUNT(CallLog.record_id)
.alias('calls_connected'))
.where(fn.DATE(CallLog.time_start) == cal_date,
CallLog.duration > 10,
CallLog.system_type == '1').
group_by(CallLog.user_id))
    # All SMS sent that day
sms_sent = (ConnectHistory
.select(ConnectHistory.operator,
fn.COUNT(ConnectHistory.id).alias('sms_sent'))
.where(ConnectHistory.type.in_(ConnectType.sms()),
ConnectHistory.created_at >= cal_date,
ConnectHistory.created_at < now_date
)
.group_by(ConnectHistory.operator))
for i in claimed:
summary[i.latest_bomber_id]['claimed'] += i.claimed
for i in completed:
summary[i.latest_bomber_id]['completed'] += i.completed
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
for i in escalated:
summary[i.current_bomber_id]['escalated'] += i.escalated
for i in transferred:
summary[i.operator_id]['transferred'] += i.transferred
for i in promised:
summary[i.bomber_id]['promised'] += i.promised
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in calls_made:
summary[int(i.user_id)]['calls_made'] += i.calls_made
for i in calls_connected:
summary[int(i.user_id)]['calls_connected'] += i.calls_connected
for i in sms_sent:
summary[i.operator_id]['sms_sent'] += i.sms_sent
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({
'bomber': bomber_id,
'cycle': data['cycle'],
'claimed': data['claimed'],
'completed': data['completed'],
'cleared': data['cleared'],
'escalated': data['escalated'],
'transferred': data['transferred'],
'promised': data['promised'],
'amount_recovered': data['amount_recovered'],
'calls_made': data['calls_made'],
'calls_connected': data['calls_connected'],
'sms_sent': data['sms_sent'],
'date': cal_date,
})
if insert_args:
Summary.insert_many(insert_args).execute()
cycle_args = []
# cal new in
    # Aggregated by cycle
escalated_in = (Escalation
.select(Escalation.escalate_to,
fn.COUNT(Escalation.id).alias('escalated_in'))
.where(Escalation.status == ApprovalStatus.APPROVED.value,
fn.DATE(Escalation.created_at) == cal_date)
.group_by(Escalation.escalate_to))
for i in escalated_in:
cycle_args.append({
'cycle': i.escalate_to,
'escalated_in': i.escalated_in,
'date': cal_date,
})
amount_recovered_total = (
RepaymentLog
.select(RepaymentLog.cycle,
fn.SUM(RepaymentLog.principal_part).alias('principal_part'),
fn.SUM(RepaymentLog.late_fee_part).alias('late_fee_part'))
.where(fn.DATE(RepaymentLog.repay_at) == cal_date)
.group_by(RepaymentLog.cycle)
)
for i in amount_recovered_total:
amount_recovered_total = i.principal_part + i.late_fee_part
cycle_args.append({
'cycle': i.cycle,
'amount_recovered_total': amount_recovered_total,
'date': cal_date,
})
if cycle_args:
Summary.insert_many(cycle_args).execute()
logging.info('cal summary done')
    # After the summary is computed, refresh overdue days and trigger automatic escalation
send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})
@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
"""已废弃,定时任务还在执行,具体情况待确定"""
cal_date = date.today() - timedelta(days=1)
employees = Bomber.select(Bomber, Role).join(Role)
auto_call_actions = (
AutoCallActions
.select(
AutoCallActions.bomber,
AutoCallActions.result,
fn.COUNT(AutoCallActions.id).alias('count')
)
.where(fn.DATE(AutoCallActions.created_at) == cal_date)
)
amount_recovered = (RepaymentLog
.select(RepaymentLog.current_bomber,
fn.SUM(RepaymentLog.principal_part)
.alias('principal_part'),
fn.SUM(RepaymentLog.late_fee_part)
.alias('late_fee_part'))
.where(fn.DATE(RepaymentLog.repay_at) == cal_date,
RepaymentLog.current_bomber.is_null(False),
RepaymentLog.is_bombed == True))
cleared = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('cleared'))
.where(fn.DATE(Application.finished_at) == cal_date,
Application.status == ApplicationStatus.REPAID.value,
Application.latest_bomber.is_null(False)))
auto_call_actions = auto_call_actions.group_by(
AutoCallActions.bomber, AutoCallActions.result
)
amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)
cleared = cleared.group_by(Application.latest_bomber)
summary = {
e.id: {
'cycle': e.role.cycle,
'answered_calls': 0,
'ptp': 0,
'follow_up': 0,
'not_useful': 0,
'cleared': 0,
'amount_recovered': 0,
}
for e in employees
}
for a in auto_call_actions:
summary[a.bomber_id]['answered_calls'] += a.count
if a.result == AutoCallResult.PTP.value:
summary[a.bomber_id]['ptp'] += a.count
if a.result == AutoCallResult.FOLLOW_UP.value:
summary[a.bomber_id]['follow_up'] += a.count
if a.result == AutoCallResult.NOT_USEFUL.value:
summary[a.bomber_id]['not_useful'] += a.count
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({
'bomber': bomber_id,
'cycle': data['cycle'],
'answered_calls': data['answered_calls'],
'ptp': data['ptp'],
'follow_up': data['follow_up'],
'not_useful': data['not_useful'],
'cleared': data['cleared'],
'amount_recovered': str(data['amount_recovered']),
'date': cal_date,
})
if insert_args:
Summary2.insert_many(insert_args).execute()
@action(MessageAction.BOMBER_SYNC_CONTACTS)
def sync_suggested_contacts(payload, msg_id):
""" suggested contacts sync """
applications = (Application
.select(Application.id, Application.user_id)
.where(Application.status <<
[ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value]))
logging.debug('start sync contact')
for a in applications:
sync_contacts(a)
logging.info('contact sync finished')
def sync_contacts(application):
logging.info('application %s start sync contact', application.id)
    # Add contact information
contacts = Contact.filter(Contact.user_id == application.user_id)
existing_numbers = {contact.number for contact in contacts}
# sms contacts
insert_contacts = []
sms_contacts = GoldenEye().get(
'/applications/%s/sms-contacts' % application.external_id
)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id)
else:
sms_contacts = sms_contacts.json()['data']
for i in sms_contacts:
if i['number'] in existing_numbers:
continue
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'],
'number': i['number'],
'relationship': Relationship.SUGGESTED.value,
'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value
})
existing_numbers.add(i['number'])
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
# call frequency
insert_contacts = []
cf = GoldenEye().get(
'/applications/%s/call/frequency' % application.external_id
)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
with db.atomic():
for i in call_frequency:
if i['number'] in existing_numbers:
                (Contact
                 .update(total_count=i['total_count'],
                         total_duration=i['total_duration'])
                 .where(Contact.number == i['number'],
                        Contact.user_id == application.user_id)
                 .execute())
continue
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'],
'number': i['number'],
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'],
'total_duration': i['total_duration'],
'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value
})
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
@action(MessageAction.BOMBER_AUTO_SMS)
@deprecated(version='1.0', reason='This function will be removed soon')
def bomber_auto_sms(payload, msg_id):
day_diff = int(payload['day_diff'])
custom_type = payload.get('custom_type')
msg_type = payload['msg_type']
logging.info('auto sms %s sending', msg_type)
applications = (
Application
.select()
.where(Application.overdue_days == day_diff,
Application.status << [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value],
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date()))
)
if custom_type == 'new':
applications = applications.where(Application.loan_success_times < 3)
if custom_type == 'old':
applications = applications.where(Application.loan_success_times >= 3)
templates = (
Template.select(Template.text, Template.app)
.where(Template.type == ConnectType.AUTO_SMS.value,
Template.id << Template.get_auto_sms_tpl(msg_type))
)
tpl_text = dict()
for tpl in templates:
tpl_text[tpl.app] = tpl.text
data_list = []
for a in applications:
tpl_data = {
'user_name': a.user_name,
'due_days': a.overdue_days,
'app_name': a.app,
'phone': a.user_mobile_no,
'cs_number': cs_number_conf.get(a.app, '02150202889'),
}
content = tpl_text[a.app].format(**tpl_data)
data_list.append({
'phone': '62' + a.user_mobile_no,
'content': content,
'app': a.app,
})
if not data_list:
logging.info('auto sms %s do not need sending', msg_type)
return
send_sms(data_list, msg_type, SmsChannel.NUSA.value)
@action(MessageAction.BOMBER_AUTO_MESSAGE_DAILY)
def bomber_auto_message_daily(payload, msg_id):
app_dict = dict(zip(AppName.keys(), AppName.values()))
    # Applications that already have an auto-call action today
auto_call_list = AutoCallActionsR \
.select(AutoCallActionsR.application_id) \
.where(fn.DATE(AutoCallActionsR.created_at) == fn.CURDATE())
applications = (
ApplicationR
.select()
.where(ApplicationR.overdue_days < 30,
ApplicationR.overdue_days > 4,
ApplicationR.type == ApplicationType.CASH_LOAN.value,
ApplicationR.status << [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value],
ApplicationR.promised_date.is_null(True) |
(fn.DATE(ApplicationR.promised_date) < datetime.today().date()),
~(ApplicationR.id << auto_call_list))
)
stage_list1 = range(*AutoCallMessageCycle.NEW_STAGE1.value['scope'], 3) #5,8,11,14
stage_list2 = range(*AutoCallMessageCycle.STAGE2.value['scope'], 3) #15,18
stage_list3 = range(*AutoCallMessageCycle.STAGE3.value['scope'], 3)
sms_list = defaultdict(list)
fcm_list = defaultdict(list)
for a in applications:
overdue_type = ''
if a.overdue_days in stage_list1:
if a.loan_success_times < 3:
overdue_type = AutoCallMessageCycle.NEW_STAGE1.value['type']
else:
overdue_type = AutoCallMessageCycle.OLD_STAGE1.value['type']
if a.overdue_days in stage_list2:
overdue_type = AutoCallMessageCycle.STAGE2.value['type']
if a.overdue_days in stage_list3:
overdue_type = AutoCallMessageCycle.STAGE3.value['type']
if overdue_type == '':
continue
# format app name
app_name = app_dict.get(a.app.upper(), AppName.default().value)
try:
tpl_id = Template.get_daily_auto_sms_tpl(overdue_type, app_name)
except KeyError:
logging.warning('Key error {}, id is {}'.format(
(overdue_type, app_name), a.id))
continue
data_map = {
'user_name': a.user_name,
'app_name': app_name,
'overdue_days': a.overdue_days,
'cs_number': cs_number_conf.get(a.app, '')
}
sms_list[(overdue_type, tpl_id, a.app)].append({
'receiver': '62' + a.user_mobile_no,
'data_map': data_map
})
fcm_list[(overdue_type, tpl_id, a.app)].append({
'receiver': a.user_id,
'data_map': data_map
})
for (msg_type, tpl_id, app_name), data_list in sms_list.items():
auto_send_sms_and_fcm(data_list, tpl_id, app_name, "SMS")
    for (msg_type, tpl_id, app_name), data_list in fcm_list.items():
auto_send_sms_and_fcm(data_list, tpl_id, app_name, "FCM")
# Overdue SMS for instalment applications
@action(MessageAction.BOMBER_INSTALMENT_AUTO_MESSAGE_DAILY)
def bomber_instalment_auto_message_daily(payload, msg_id):
applications = (ApplicationR.select(ApplicationR.id,
ApplicationR.app,
ApplicationR.user_id,
ApplicationR.user_name,
ApplicationR.user_mobile_no,
ApplicationR.loan_success_times,
OverdueBillR.status,
OverdueBillR.sub_bill_id,
OverdueBillR.overdue_days, )
.join(OverdueBillR, JOIN_LEFT_OUTER,
on=ApplicationR.id == OverdueBillR.collection_id)
.where(ApplicationR.type ==
ApplicationType.CASH_LOAN_STAGING.value,
ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.overdue_days < 90,
ApplicationR.promised_date.is_null(True) |
(fn.DATE(
ApplicationR.promised_date) < datetime.today().date()),
)
.dicts())
    # Work out the real overdue days and outstanding amounts
app_overdues = {}
for app in applications:
if app["status"] == ApplicationStatus.REPAID.value:
continue
if app["id"] in app_overdues:
overdue_days = app_overdues[app["id"]]["overdue_days"]
app_overdues[app["id"]]["overdue_days"] = max(app["overdue_days"],
overdue_days)
app_overdues[app["id"]]["bill_sub_ids"].append(app["sub_bill_id"])
else:
app_overdues[app["id"]] = {
"app_name": app["app"],
"user_id": app["user_id"],
"user_name": app["user_name"],
"overdue_days": app["overdue_days"],
"bill_sub_ids": [app["sub_bill_id"]],
"phone": '62' + app["user_mobile_no"],
"loan_success_times": app["loan_success_times"],
"cs_number": cs_number_conf.get(app["app"], '02150202889')
}
    # Pick the cases that need an SMS and compute the corresponding unpaid amounts
sms_dict = {}
sub_bill_ids = []
send_message = defaultdict(list)
send_fcm = defaultdict(list)
for aid, app in app_overdues.items():
message_id = Template.get_daily_instalment_auto_sms_tpl(
overdue_days=app["overdue_days"],
loan_times=app["loan_success_times"]
)
if message_id:
app["tpl_id"] = message_id
sms_dict[aid] = app
sub_bill_ids.extend(app["bill_sub_ids"])
if not sms_dict:
logging.info("no application need send sms")
return
sub_bills = []
try:
for index in range(0,len(sub_bill_ids),30):
sub_bill = BillService().sub_bill_list(
bill_sub_ids=sub_bill_ids[index:index+30])
sub_bills += sub_bill
except Exception as e:
logging.info("send sms get bill error:%s" % str(e))
return
sub_bills_dict = {int(sb["id"]): sb for sb in sub_bills}
for aid, app in sms_dict.items():
amount = 0
for sbid in app["bill_sub_ids"]:
amount += sub_bills_dict.get(sbid, {}).get("unpaid", 0)
data_map = {
"user_name": app["user_name"],
"app_name": app["app_name"],
"overdue_days": app["overdue_days"],
"cs_number": app["cs_number"],
"amount": str(amount)
}
send_message[(app['tpl_id'], app["app_name"])].append({
"receiver": app["phone"],
"data_map": data_map
})
send_fcm[(app['tpl_id'], app["app_name"])].append({
"receiver": app["user_id"],
"data_map": data_map
})
for (tpl_id, app_name), data_list in send_message.items():
auto_send_sms_and_fcm(data_list, tpl_id, app_name, "SMS")
    for (tpl_id, app_name), data_list in send_fcm.items():
auto_send_sms_and_fcm(data_list, tpl_id, app_name, "FCM")
def auto_send_sms_and_fcm(data_list, tpl_id, app_name, message_type):
if not data_list:
return
    # 200 records per request
for idx in range(0, len(data_list), 200):
request_json = {
"app_name": app_name,
"failed_retry": True,
"is_masking": True,
"list": data_list[idx: idx+200],
"message_level": 1,
"message_type": message_type,
"sms_type": 4 if message_type == "SMS" else 0,
"type_id": tpl_id
}
try:
result = MessageService().send_batch_template(**request_json)
            if not result.get("result"):
                logging.error("auto_send_sms_and_fcm failed, tpl_id:%s,app:%s,"
                              "res:%s", tpl_id, app_name, result)
        except Exception as e:
            logging.error("auto_send_sms_and_fcm error, tpl_id:%s,app:%s,"
                          "error:%s", tpl_id, app_name, str(e))
            return
    logging.info("auto_send_sms_and_fcm done, app:%s,type:%s,count:%s",
                 app_name, message_type, len(data_list))
def get_danamall_msg_service(app_name, message_service):
if app_name == AppName.DANAMALL.value:
# token = app.config['service.message.%s.token' % app_name.lower()]
message_service = Message(version=app_name)
return message_service
# Bomber-triggered SMS reminding customers of their promised repayment date
@action(MessageAction.BOMBER_REMIND_PROMISE)
def bomber_remind_promise(payload, msg_id):
day_diff = int(payload['day_diff'])
msg_type = payload['msg_type']
logging.info('auto sms %s sending', msg_type)
applications = (
Application
.select()
.where(
fn.DATEDIFF(fn.NOW(), Application.promised_date) == day_diff,
Application.status << [
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
]
)
)
templates = (
Template
.select(Template.text, Template.app)
.where(Template.type == ConnectType.AUTO_SMS.value,
Template.id << Template.get_auto_sms_tpl(msg_type))
)
tpl_text = {tpl.app: tpl.text for tpl in templates}
message_date_dict = defaultdict(list)
for a in applications:
tpl_data = {
'user_name': a.user_name,
'due_days': a.overdue_days,
'app_name': a.app,
'phone': a.user_mobile_no,
'cs_number': cs_number_conf.get(a.app, '02150202889'),
'promised_date': a.promised_date.strftime('%d-%m-%Y'),
}
content = tpl_text[a.app].format(**tpl_data)
message_date_dict[a.app].append(
{
"content": content,
"receiver": '62' + a.user_mobile_no,
"title": ""
}
)
for app_name, data_list in message_date_dict.items():
send_sms(data_list, msg_type, app_name)
@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
app_id = payload['id']
msg_type = payload['msg_type']
discount_to = payload['discount_to']
effective_to = payload['effective_to']
application = Application.filter(Application.id == app_id).first()
if not application:
logging.error('discount approved msg send failed '
'application %s not found', app_id)
return
template = (
Template
.select(Template.text, Template.app)
.where(Template.type == ConnectType.AUTO_SMS.value,
Template.id << Template.get_auto_sms_tpl(msg_type),
Template.app == application.app)
.first()
)
if not template:
logging.error('discount approved msg send failed '
'template %s not found', msg_type)
return
promised_date = None
if application.promised_date:
promised_date = application.promised_date.strftime('%d-%m-%Y')
tpl_data = {
'user_name': application.user_name,
'due_days': application.overdue_days,
'app_name': application.app,
'phone': application.user_mobile_no,
'cs_number': cs_number_conf.get(application.app, '02150202889'),
'promised_date': promised_date,
'discount_to': discount_to,
'effective_to': effective_to,
}
content = template.text.format(**tpl_data)
data_list = [{
'receiver': '62' + application.user_mobile_no,
'content': content,
'title': "",
}]
send_sms(data_list, msg_type, application.app)
# Send custom SMS in batches
def send_sms(data_list, msg_type, app_name):
if not data_list:
return
for index in range(0, len(data_list), 200):
req_data = {
"app_name": app_name,
"failed_retry": True,
"is_masking": True,
"list": data_list[index: index+200],
"message_level": 0,
"message_type": "SMS",
"sms_type": 3
}
try:
result = MessageService().send_batch(**req_data)
if not result.get("result"):
logging.error(
"send_sms_failed:%s,req:%s,res:%s",msg_type,req_data,result)
except Exception as e:
            logging.error(
                "send_sms_error:%s,req:%s,error:%s" % (
                    msg_type, req_data, str(e)))
return
logging.info("send_sms_success:%s", msg_type)
# Build the auto-call list and dispatch applications
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):
with db.atomic():
        # Dispatch single-period cases: the outsourcing partners of each cycle get
        # their share first, the rest goes to designated in-house bombers.
        # Partners mark the different stages; bomber.partner_id identifies outsourced accounts.
bomber_dispatch_app()
        # Dispatch instalment cases; bomber.instalment identifies which cycle a bomber handles
dispatch_instalment_app()
    # Dispatch records
dis_apps = (DispatchApp
.select(DispatchApp.application)
.where(DispatchApp.status == DisAppStatus.NORMAL.value))
c1_apps = (
Application
.select(Application.id,
Application.cycle,
Application.follow_up_date,
Application.called_times)
.where(
Application.status.not_in([ApplicationStatus.REPAID.value,
ApplicationStatus.AB_TEST.value]),
Application.cycle == Cycle.C1A.value,
Application.is_rejected == False, # noqa
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date())
).order_by(Application.overdue_days, Application.apply_at)
)
dis_apps_ids = [da.application_id for da in dis_apps]
insert_args = []
for a in c1_apps:
if a.id in dis_apps_ids:
continue
insert_args.append({
'application': a.id,
'cycle': a.cycle,
'follow_up_date': a.follow_up_date,
'called_times': 1 if a.called_times else 0,
'description': 'init'
})
if not insert_args:
logging.error('no application need auto call')
    # Scan the application table and insert the rows into auto_call_list
with db.atomic():
AutoCallList.delete().execute()
for idx in range(0, len(insert_args), 100):
AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()
for idx in range(0, len(insert_args), 100):
application_list = [
i['application']
for i in insert_args[idx:idx + 100]
]
        # Fetch the verified, usable phone numbers
send_to_default_q(
MessageAction.BOMBER_AUTO_CALL_CONTACT,
{'application_list': application_list}
)
logging.info('bomber generate auto call list finished')
    # Redispatch cases at specific overdue days that have no PTP, i.e. cases that have been sitting for a long time
send_to_default_q(
MessageAction.UPDATE_BOMBER_FOR_SPECIAL,
{})
class ChangeBomberTool(object):
@staticmethod
def in_record(bomber_id, ids, bd):
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'),
R('null').alias('partner_id'),
SQL('DATE_ADD(CURDATE(),INTERVAL 14 DAY)')
.alias('expected_out_time'),
Application.overdue_days.alias(
'entry_overdue_days'))
.where(Application.status !=
ApplicationStatus.REPAID.value,
Application.id << ids))
(Application
.update(latest_bomber=bomber_id)
.where(Application.id.in_(ids))
.execute())
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bd),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@staticmethod
def out_record(a, bd):
_id = str(a.id)
(DispatchAppHistory.update(
out_at=datetime.now(),
out_overdue_days=a.overdue_days,
out_principal_pending=(
a.amount -
Decimal(bd[_id].get('principal_paid'))),
out_late_fee_pending=(
bd[_id].get('late_fee') -
bd[_id].get('late_fee_paid')),
)
.where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == a.latest_bomber_id
)).execute()
a.last_bomber = a.latest_bomber
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
a.save()
@staticmethod
def classify(l, b):
if len(l) == 1:
return l[0]
_l = filter(lambda x: x['bomber'] != b, l)
return min(_l, key=lambda x: len(x['ids']))
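    # classify returns the smallest bucket whose bomber differs from the
    # application's current bomber, e.g. for
    #   [{'bomber': 1, 'ids': [7, 8]}, {'bomber': 2, 'ids': []}] and b=1
    # it returns the bomber-2 bucket, so reassignments stay balanced.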
@action(MessageAction.UPDATE_BOMBER_FOR_SPECIAL)
def update_bomber_for_special(payload, msg_id):
"""
    cycle 1b: every day, cases at DPD21 without an active PTP are reassigned to another bomber
    cycle 2:  every day, cases at DPD46 without an active PTP are reassigned to another bomber
    cycle 3:  every day, cases at DPD76 without an active PTP are reassigned to another bomber
:param payload:
:param msg_id:
:return:
"""
filter_list = {Cycle.C1B.value: {"overdue_days": 21, "role_id": 5},
Cycle.C2.value: {"overdue_days": 46, "role_id": 6},
Cycle.C3.value: {"overdue_days": 76, "role_id": 8}}
cbt = ChangeBomberTool()
for cycle, values in filter_list.items():
overdue_days = values["overdue_days"]
bombers = (Bomber.select()
.where(Bomber.role == values["role_id"],
Bomber.instalment == 0,
Bomber.is_del == 0))
bids = {b.id:b for b in bombers}
apps = (Application.select()
.where(Application.cycle == cycle,
Application.type == ApplicationType.CASH_LOAN.value,
Application.overdue_days == overdue_days,
Application.status == ApplicationStatus.AB_TEST.value,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < date.today()),
Application.latest_bomber_id.in_(list(bids.keys()))))
classify_dict = defaultdict(list)
for b in bombers:
classify_dict[b.group_id].append({"bomber": b.id, "ids": []})
with db.atomic():
app_ids = [i.id for i in apps]
if app_ids and bids:
bills = BillService().bill_list(application_ids=app_ids)
bill_dict = {str(bill['application_id']): bill for bill in
bills}
for i in apps:
current_bomber = bids.get(i.latest_bomber_id)
if not current_bomber:
continue
classify_list = classify_dict.get(current_bomber.group_id)
d = cbt.classify(classify_list, i.latest_bomber_id)
d["ids"].append(i.id)
cbt.out_record(i, bill_dict)
for group_id, cl_list in classify_dict.items():
for item in cl_list:
cbt.in_record(item["bomber"], item["ids"], bill_dict)
else:
logging.info(
"cycle:{} empty application list {} or bomber list {}".format(
cycle, app_ids, list(bids.keys())))
try:
update_bomber_for_special_instalment()
except Exception as e:
logging.error("special_instalment_error:%s"%str(e))
# Special dispatch for instalment C2/C3 cases
def update_bomber_for_special_instalment():
filter_list = {Cycle.C1B.value: 21, Cycle.C2.value: 46, Cycle.C3.value: 76}
for cycle,overdue_days in filter_list.items():
        # Bombers designated for instalment collection in this cycle
bombers = (Bomber.select().where(Bomber.instalment == cycle,
Bomber.is_del == 0))
bids = {b.id:b for b in bombers}
        # Fetch the collection cases
apps = (Application.select()
.where(Application.cycle == cycle,
Application.status == ApplicationStatus.AB_TEST.value,
Application.type ==
ApplicationType.CASH_LOAN_STAGING.value,
Application.overdue_days == overdue_days,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < date.today()),
Application.latest_bomber_id.in_(list(bids.keys()))))
classify_dict = defaultdict(list)
for b in bombers:
classify_dict[b.group_id].append({"bomber":b.id, "ids":[]})
for a in apps:
current_bomber = bids.get(a.latest_bomber_id)
if not current_bomber:
continue
classify_list = classify_dict.get(current_bomber.group_id)
d = ChangeBomberTool.classify(classify_list, a.latest_bomber_id)
d["ids"].append(a.id)
with db.atomic():
for group_id,classify_list in classify_dict.items():
for cl in classify_list:
aids = cl["ids"]
if not aids:
continue
latest_bomber_id = cl["bomber"]
q = (Application.update(latest_bomber = latest_bomber_id,
last_bomber = Application.latest_bomber)
.where(Application.id << aids)
.execute())
record_param = {
"cycle": cycle,
"application_ids": aids,
"dest_bomber_id": latest_bomber_id,
}
out_and_in_record_instalment(**record_param)
def bomber_dispatch_app():
    # Dispatch single-period C1A cases to outsourcing; the partner must be configured
try:
c1a_dispatch_app()
except Exception as e:
logging.error("c1a_dispatch_app error:%s"%str(e))
cycle = {
1: 10,
2: 30,
3: 60,
4: 90
}
    # Single-period outsourcing, Cycle.C2, overdue_day 31
apps = (Application.select()
.where(fn.DATE(Application.C2_entry) == date.today(),
Application.type == ApplicationType.CASH_LOAN.value))
partners = (Partner.select()
.where(Partner.status == PartnerStatus.NORMAL.value,
Partner.cycle == Cycle.C2.value))
apps_ids = [a.id for a in apps]
dispatch_inserts = []
start_index = 0
apps_length = len(apps_ids)
logging.warning('apps length %s' % str(apps_length))
    for p in partners:  # currently there is only one partner
bombers = (Bomber.select()
.where(Bomber.partner == p.id,
Bomber.status != BomberStatus.OUTER_LEADER.value,
Bomber.is_del == 0))
gen = CycleIter([b.id for b in bombers])
existing_list = []
end_index = start_index + int(apps_length * p.app_percentage)
logging.info('partner length %s' % str(end_index))
if not apps_ids[start_index:end_index]:
continue
bills = BillService().bill_list(
application_ids=apps_ids[start_index:end_index])
bill_dict = {bill['application_id']: bill for bill in bills}
for a_id in apps_ids[start_index:end_index]:
bomber = average_gen(gen, existing_list)
q = (DispatchApp.delete()
.where(DispatchApp.application == a_id)
.execute())
dispatch_inserts.append({
'application': a_id,
'bomber': bomber,
'partner': p.id,
})
            # After handing the case to outsourcing, keep a history record for data analysis
application = (Application.select()
.where(Application.id == a_id)).first()
application.latest_bomber = bomber
application.status = ApplicationStatus.AB_TEST.value
application.ptp_bomber = None
application.save()
day_next_cycle = (cycle.get(application.cycle) -
application.overdue_days)
DispatchAppHistory.create(
application=a_id,
partner_id=p.id,
bomber_id=bomber,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(
application.amount -
Decimal(bill_dict[a_id].get('principal_paid'))),
entry_late_fee_pending=(
Decimal(bill_dict[a_id].get('late_fee')) -
Decimal(bill_dict[a_id].get('late_fee_paid'))),
expected_out_time=(date.today() +
timedelta(days=day_next_cycle))
)
start_index = end_index
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
    # AB-test dispatch (manually worked cases)
config = SystemConfig.prefetch(SCI.AB_TEST_C2)
c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)
    # Remaining single-period cases go to the designated in-house bomber ids [76, 100, 106, 107, 213, 215, 216, 221, 222, 223, 226, 235]
c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)
    # application ids from the python-side database
c2 = apps_ids[start_index:]
if c2:
bills = BillService().bill_list(
application_ids=c2)
else:
bills = []
    # bills from the java-side database
bill_dict = {bill['application_id']: bill for bill in bills}
logging.info('c2 AB_test length: %s' % str(c2))
gen = CycleIter(c2_bomber)
existing_list = []
for c in c2:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == c).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = 46 - application.overdue_days
DispatchAppHistory.create(
application=c,
bomber_id=bomber,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(application.amount
- bill_dict[c].get('principal_paid', 0)),
entry_late_fee_pending=(
bill_dict[c].get('late_fee', 0) -
bill_dict[c].get('late_fee_paid', 0)),
expected_out_time=(date.today() + timedelta(days=day_next_cycle))
)
ab_test_other()
# Part of the single-period cases go to outsourcing; in-house C1A cases enter auto call without dispatch
def c1a_dispatch_app():
today = datetime.today().date()
tomorrow = today + timedelta(days=1)
    # Fetch the single-period applications
c1a_apps = (Application.select()
.where(Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value],
Application.dpd1_entry >= today,
Application.dpd1_entry < tomorrow,
Application.type == ApplicationType.CASH_LOAN.value))
all_aids = [a.id for a in c1a_apps]
    # Fetch the outsourcing partners
partners = (Partner.select()
.where(Partner.status == PartnerStatus.NORMAL.value,
Partner.cycle == Cycle.C1A.value))
end = 0
for p in partners:
        # Look up bombers directly through the partner
bombers = (Bomber.select()
.where(Bomber.partner == p.id,
Bomber.is_del == 0))
start = end
end += int(len(all_aids) * p.app_percentage)
aids = all_aids[start:end]
bids = [b.id for b in bombers]
if not bids or not aids:
continue
        # Number of cases each outsourced bomber should get
average_number = get_average_number(len(aids),len(bids))
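        # get_average_number is assumed to split len(aids) cases as evenly as
        # possible across len(bids) bombers, e.g. (7, 3) -> [3, 2, 2]; the
        # slices below then hand each bomber a consecutive block of aids.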
p_end = 0
for i,bid in enumerate(bids):
p_start = p_end
p_end += average_number[i]
b_aids = aids[p_start:p_end]
with db.atomic():
q = (Application
.update(latest_bomber = bid,
status = ApplicationStatus.AB_TEST.value)
.where(Application.id << b_aids)
.execute())
params = {
"cycle": Cycle.C1A.value,
"dest_partner_id": p.id,
"application_ids": b_aids,
"dest_bomber_id": bid
}
new_in_record(**params)
try:
dispatch_inserts = []
for aid in b_aids:
dispatch_inserts.append({'application': aid,
'bomber': bid,
'partner': p.id,
'status': DisAppStatus.NORMAL.value})
if dispatch_inserts:
q = (DispatchApp.insert_many(dispatch_inserts).execute())
except Exception as e:
logging.error("c1a分件写入dispatch_app error:%s"%str(e))
def ab_test_other():
cycle_upper = {
1: 10,
2: 30,
3: 60,
4: 76
}
c1b = (Application.select()
.where(fn.DATE(Application.C1B_entry) == date.today(),
Application.type == ApplicationType.CASH_LOAN.value)
.order_by(-Application.overdue_days)
)
c1b_id = [a.id for a in c1b]
dis_app_update = (DispatchApp.update(status=DisAppStatus.ABNORMAL.value)
.where(DispatchApp.application.in_(c1b_id)))
dis_app_update.execute()
c3 = (Application.select()
.where(fn.DATE(Application.C3_entry) == date.today(),
Application.type == ApplicationType.CASH_LOAN.value))
all_id = [b.id for b in c3]
try:
        # Assign part of the C3 cases to outsourcing partners
partners = (Partner.select()
.where(Partner.status == PartnerStatus.NORMAL.value,
Partner.cycle == Cycle.C3.value))
start_index, end_index, out_apps = 0, 0, {}
for p in partners:
end_index += int(len(all_id) * p.app_percentage)
out_apps[p.id] = all_id[start_index:end_index]
start_index = end_index
c3_id = all_id[end_index:]
allot_c3_case(out_apps)
    except Exception:
c3_id = all_id
config = SystemConfig.prefetch(SCI.AB_TEST_C1B, SCI.AB_TEST_C3)
c1b_bomber = config.get(SCI.AB_TEST_C1B, SCI.AB_TEST_C1B.default_value)
c3_bomber = config.get(SCI.AB_TEST_C3, SCI.AB_TEST_C3.default_value)
    # Filter out bombers who collect instalment cases
c3_bomber = get_cash_bomber(c3_bomber, Cycle.C3.value)
data = [{'ids': c1b_id, 'bomber': c1b_bomber, 'index': 0, 'cycle': 2},
{'ids': c3_id, 'bomber': c3_bomber, 'index': 1, 'cycle': 4}]
for d in data:
applications = d.get('ids')
length = len(applications)
end = int(length * d.get('index'))
gen = CycleIter(d.get('bomber'))
existing_list = []
if not applications:
continue
bills = BillService().bill_list(
application_ids=applications)
bill_dict = {bill['application_id']: bill for bill in bills}
for a in applications[:end]:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == a).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = (cycle_upper.get(application.cycle) -
application.overdue_days)
DispatchAppHistory.create(
application=a,
bomber_id=bomber,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(application.amount -
bill_dict[a]['principal_paid']),
entry_late_fee_pending=(bill_dict[a]['late_fee'] -
bill_dict[a]['late_fee_paid']),
expected_out_time=(date.today() +
timedelta(days=day_next_cycle))
)
        # Dispatch to outsourcing teams according to the partner table configuration.
if d.get('cycle') == Cycle.C1B.value:
c1b_wb_partner = (Partner.select()
.where(Partner.cycle == Cycle.C1B.value,
Partner.status ==
PartnerStatus.NORMAL.value))
            # Fetch the C1B outsourcing teams
c1b_wb_p_dict = { str(p.id):p.app_percentage for p in c1b_wb_partner}
c1b_wb_pids = list(map(int, c1b_wb_p_dict.keys()))
c1b_wb_bombers = (Bomber.select()
.where(Bomber.is_del == 0,
Bomber.partner_id << c1b_wb_pids,
Bomber.password.is_null(False)))
            # Members of each outsourcing team and the number of cases each team should get
c1b_wb_pba = {}
apps_num = len(applications)
for cb in c1b_wb_bombers:
cb_key = str(cb.partner_id)
if cb_key in c1b_wb_pba:
c1b_wb_pba[cb_key]["bids"].append(cb.id)
else:
                    # Use the team's percentage to compute how many cases it receives
start = end
percentage = c1b_wb_p_dict.get(cb_key, 0)
end = start + ceil(apps_num * percentage)
c1b_wb_pba[cb_key] = {
"bids": [cb.id],
"pid": cb.partner_id,
"apps": applications[start:end]
}
            # The remaining new cash-loan C1B cases stay in-house
inner_c1b_apps = applications[end:]
dispatch_c1b_inner_apps(aids=inner_c1b_apps,
bills=bill_dict,
period=cycle_upper.get(Cycle.C1B.value))
for pid,c1b_wb in c1b_wb_pba.items():
c1b_wb_apps = c1b_wb["apps"]
c1b_wb_bids = c1b_wb["bids"]
average_nums = get_average_number(len(c1b_wb_apps),
len(c1b_wb_bids))
bid_end = 0
for b_index,bid in enumerate(c1b_wb_bids):
bid_start = bid_end
bid_end = bid_start + average_nums[b_index]
bid_apps = c1b_wb_apps[bid_start:bid_end]
logging.info("c1b_分件:bid:%s,bid_apps:%s"%(bid, bid_apps))
with db.atomic():
app_sql = (Application.update(latest_bomber=bid,
status=ApplicationStatus.AB_TEST.value,
ptp_bomber=None)
.where(Application.id << bid_apps))
app_sql.execute()
params = {
"apps":bid_apps,
"partner_id": int(pid),
"bill_dict": bill_dict,
"period": cycle_upper.get(Cycle.C1B.value),
"bomber_id":bid
}
c1b_dispatch_in_record(**params)
try:
for aid in bid_apps:
dispatch_inserts = {
'application': aid,
'bomber': bid,
'partner': int(pid),
'status': DisAppStatus.NORMAL.value,
}
q = (DispatchApp.update(**dispatch_inserts)
.where(DispatchApp.application == aid)
.execute())
if not q:
DispatchApp.create(**dispatch_inserts)
except Exception as e:
logging.error("dispatchApp插入失败:%s"%str(e))
def allot_c3_case(out_data):
dispatch_inserts = []
for key, value in out_data.items():
if not value:
continue
bombers = (Bomber
.filter(Bomber.partner == key,
Bomber.status == BomberStatus.OUTER.value,
Bomber.is_del == 0))
bomber_ids = [b.id for b in bombers]
bomber = CycleIter(bomber_ids)
bills = BillService().bill_list(application_ids=value)
bill_dict = {bill['application_id']: bill for bill in bills}
for v in value:
bomber_id = bomber.__next__()
q = (DispatchApp.delete()
.where(DispatchApp.application == v)
.execute())
dispatch_inserts.append({
'application': v,
'bomber': bomber_id,
'partner': key,
})
            # Keep a history record for data analysis
application = (Application.filter(Application.id == v)).first()
application.latest_bomber = bomber_id
application.ptp_bomber = None
application.status = ApplicationStatus.AB_TEST.value
application.save()
            # A C3 case moves to the next cycle at 90 overdue days
day_next_cycle = (90 - application.overdue_days)
DispatchAppHistory.create(
application=v,
partner_id=key,
bomber_id=bomber_id,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(
application.amount -
Decimal(bill_dict[v].get('principal_paid'))),
entry_late_fee_pending=(
Decimal(bill_dict[v].get('late_fee')) -
Decimal(bill_dict[v].get('late_fee_paid'))),
expected_out_time=(
date.today() + timedelta(days=day_next_cycle))
)
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
# Bombers who only collect single-period (cash loan) cases
def get_cash_bomber(bids, cycle):
cash_bombers = (Bomber.select()
.where(Bomber.id << bids,
Bomber.is_del == 0,
Bomber.instalment != cycle))
cash_bids = [b.id for b in cash_bombers]
return cash_bids
# Dispatch single-period C1B cases to in-house staff
def dispatch_c1b_inner_apps(aids, bills, period=30):
    # Staff who should receive the cases
bombers = (Bomber.select()
.where(Bomber.role_id == 5,
Bomber.is_del == 0,
Bomber.instalment == 0))
bids = [b.id for b in bombers]
if not aids or not bids:
return
avg_num = get_average_number(len(aids),len(bids))
end = 0
with db.atomic():
for index,b in enumerate(bids):
start = end
end = start + avg_num[index]
b_aids = aids[start:end]
app_sql = (Application.update(latest_bomber=b,
status=ApplicationStatus.AB_TEST.value,
ptp_bomber=None)
.where(Application.id << b_aids))
app_sql.execute()
params = {
"apps": b_aids,
"bill_dict": bills,
"period": period,
"bomber_id": b
}
c1b_dispatch_in_record(**params)
# Dispatch instalment cases to staff
def dispatch_instalment_app():
cycle_list = [Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value,Cycle.M3.value]
    # Every day, pick up the cases in each cycle that have not been dispatched
for cycle in cycle_list:
apps = (Application.select()
.where(Application.cycle == cycle,
Application.latest_bomber.is_null(True),
Application.status != ApplicationStatus.REPAID.value,
(Application.type ==
ApplicationType.CASH_LOAN_STAGING.value)))
aids = [a.id for a in apps]
if not aids:
continue
        # Designated instalment bombers for this cycle
bombers = (Bomber.select()
.where(Bomber.is_del == 0,
Bomber.instalment == cycle))
bids = [b.id for b in bombers]
if not bids:
continue
average_nums = get_average_number(len(apps),len(bids))
end = 0
for i,bid in enumerate(bids):
start = end
end = start + average_nums[i]
bid_apps = aids[start:end]
with db.atomic():
                # Update status
                q = (Application.update(ptp_bomber=None,
                                        latest_bomber=bid,  # newest bomber id
                                        last_bomber=Application.latest_bomber,  # previous bomber
                                        status=ApplicationStatus.AB_TEST.value)  # manually worked case
.where(Application.id << bid_apps)
.execute())
record_param = {"cycle": cycle,
"application_ids": bid_apps,
"dest_bomber_id": bid}
out_and_in_record_instalment(**record_param)
# Check-in and check-out records for instalment dispatch
def out_and_in_record_instalment(**kwargs):
if not kwargs.get("application_ids"):
return
    # Check out first
out_q = (DispatchAppHistory.update(out_at = fn.NOW())
.where(DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True))
.execute())
    # Then check in
cycle_period = {
1: '10',
2: '30',
3: '60',
4: '90'
}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
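    # With cycle 2, for example, the subquery below computes expected_out_time
    # as DATE_ADD(CURDATE(), INTERVAL (30 - t1.overdue_days) DAY), i.e. the
    # date the case is expected to leave the current cycle.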
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(kwargs['dest_bomber_id'])).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(kwargs['dest_partner_id'])).alias('partner_id'),
(SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'
' t1.overdue_days) DAY)' % period))
.alias('expected_out_time'))
.where(Application.status != ApplicationStatus.REPAID.value,
Application.id << kwargs['application_ids']))
application_list = list(subquery)
for idx in range(0, len(application_list), 50):
applications = application_list[idx:idx + 50]
app_ids = [i.application_id for i in applications]
        # Fetch all the overdue_bill rows
overdue_bills = (OverdueBill.select()
.where(OverdueBill.collection_id << app_ids))
sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
insert_args = lambad_instalment_result(bill_list, applications)
if not insert_args:
continue
DispatchAppHistory.insert_many(insert_args).execute()
# Format the instalment check-in rows
def lambad_instalment_result(bill_list,applications):
bill_dict = {}
insert_args = []
    # Aggregate the check-in amounts
for sub_bill in bill_list:
bill_id = sub_bill["bill_id"]
principal_pending = sub_bill["amount"] - sub_bill['principal_paid']
late_fee_pending = sub_bill["late_fee"] - sub_bill["late_fee_paid"]
if bill_id in bill_dict:
bill_dict[bill_id]["entry_principal_pending"] += principal_pending
bill_dict[bill_id]["entry_late_fee_pending"] += late_fee_pending
else:
bill_dict[bill_id] = {
"entry_principal_pending": principal_pending,
"entry_late_fee_pending": late_fee_pending
}
for app in applications:
bill_entry = bill_dict.get(app.bill_id, {})
entry_principal_pending = bill_entry.get("entry_principal_pending", 0)
entry_late_fee_pending = bill_entry.get("entry_late_fee_pending", 0)
insert_dict = {
'created_at': app.created_at,
'updated_at': app.updated_at,
'application': app.application_id,
'bomber_id': app.bomber_id,
'entry_at': app.entry_at,
'entry_overdue_days': app.entry_overdue_days,
'partner_id': app.partner_id,
'expected_out_time': app.expected_out_time,
'entry_principal_pending': entry_principal_pending,
'entry_late_fee_pending': entry_late_fee_pending
}
insert_args.append(insert_dict)
return insert_args
def c1b_dispatch_in_record(**kwargs):
app_ids = kwargs.get("apps")
partner_id = kwargs.get("partner_id","null")
bill_dict = kwargs.get("bill_dict")
period = kwargs.get("period")
bomber_id = kwargs.get('bomber_id')
if not all([app_ids, partner_id, bill_dict, period]):
return False
bill_dict = { str(k):v for k,v in bill_dict.items()}
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(partner_id)).alias('partner_id'),
(SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'
' t1.overdue_days) DAY)' % period))
.alias('expected_out_time'))
.where(Application.id << app_ids))
application_list = list(subquery)
for idx in range(0,len(application_list),1000):
applications = application_list[idx:idx+1000]
insert_args = list(map(partial(lambda_result,
dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
# Collect the phone numbers to contact
@action(MessageAction.BOMBER_AUTO_CALL_CONTACT)
def bomber_auto_call_contact(payload, msg_id):
application_list = payload['application_list']
applications = []
for app_id in application_list:
applications.append(Application.filter(Application.id == app_id)
.first())
    # Build the contact queue for each application
with db.atomic():
for application in applications:
cycle = application.cycle
            # Adjusted query conditions
contacts = (
Contact
.select()
.where(Contact.user_id == application.user_id,
Contact.latest_status.not_in(ContactStatus.no_use()))
.order_by(-Contact.useful,
Contact.relationship,
-Contact.total_duration,
-Contact.total_count)
)
level1 = []
level2 = []
level3 = []
level = []
for c in contacts:
if c.relationship == Relationship.APPLICANT.value:
level.append(c)
elif c.relationship == Relationship.FAMILY.value:
level1.append(c)
elif c.relationship == Relationship.COMPANY.value:
level2.append(c)
elif c.relationship == Relationship.SUGGESTED.value:
level3.append(c)
contacts = level + level2 + level1 + level3
numbers = []
fc_count = 0
            # Pre-check if need phone calls: verify whether the numbers are reachable
app_calls = []
need_verify = False
for eac_contact in contacts:
if (eac_contact.relationship == Relationship.FAMILY.value and
eac_contact.useful == ContactsUseful.NONE.value):
need_verify = True
break
if need_verify:
logging.info('Found contact need update. app id {}'
.format(str(application.id)))
app_calls = AuditService().phone_invalid(cat=Relationship(1).name,
application_id=application.external_id)
call_history = True
c1b_family_dict = defaultdict(list)
for c in contacts:
if c.relationship == Relationship.COMPANY.value:
if cycle == Cycle.C1A.value:
call_history = check_call_history(application)
break
if cycle == Cycle.C1B.value:
                        # For now C1B only calls the company number filled in by the applicant
if c.source != CompanyContactType.BASIC_INFO_JOB_TEL.value:
continue
if c.relationship == Relationship.FAMILY.value:
if cycle == Cycle.C1A.value:
call_history = check_call_history(application)
break
# Update contact useful
if c.useful == ContactsUseful.NONE.value:
c.useful = check_valid_phone(app_calls, c)
c.save()
if c.useful == ContactsUseful.INVALID.value:
logging.info('Found invalid contact. {}'
.format(str(c.id)))
continue
                    # Family contacts need to be ordered by source
if cycle == Cycle.C1B.value:
c1b_family_dict[c.source].append(c.number)
continue
if c.relationship == Relationship.SUGGESTED.value:
if cycle not in (Cycle.C2.value, Cycle.C3.value):
break
if cycle == Cycle.C2.value and fc_count > 10:
break
if cycle == Cycle.C3.value and fc_count > 20:
break
fc_count += 1
numbers.append(c.number)
# if cycle1 applicant is in no_use add ec
if len(numbers) == 0 or not call_history:
src_contact = (
Contact.select()
.where(Contact.user_id == application.user_id,
                               Contact.source << FamilyContactType.c1a_order()))
                # If C1A collection calls did not connect within 5 days, dial in the new order; expanded from 2 source types to 4
c1a_family_dict = defaultdict(list)
for e in src_contact:
c1a_family_dict[e.source].append(e.number)
for call_type in FamilyContactType.c1a_order():
numbers.extend(c1a_family_dict[call_type])
if cycle == Cycle.C1B.value:
for call_type in FamilyContactType.c1b_order():
numbers.extend(c1b_family_dict[call_type])
numbers = list(set(numbers))
update_query = (
AutoCallList
.update(numbers=','.join(numbers))
.where(AutoCallList.application == application.id)
)
update_query.execute()
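# A contact number is considered INVALID if it matches a tel_no or mobile_no returned
# by the audit phone_invalid check; otherwise it stays AVAILABLE.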
def check_valid_phone(phone_list, contact):
useful = ContactsUseful.AVAILABLE.value
for each_phone in phone_list:
if contact.number == each_phone.get('tel_no') or \
contact.number == each_phone.get('mobile_no'):
useful = ContactsUseful.INVALID.value
break
return useful
# For C1A applications with no connected call within 5 days, open the EC contacts
def check_call_history(application):
app_create_at = application.created_at + timedelta(days=4)
if datetime.today().date() > app_create_at.date():
call_actions = (CallActions.select()
.where(CallActions.type == 0,
CallActions.application == application.id,
CallActions.created_at >
(datetime.now() - timedelta(days=5))))
for call in call_actions:
if call.phone_status == PhoneStatus.CONNECTED.value:
return True
return False
return True
# When the gap between now and updated_at exceeds SCAVENGER_TIME, the scavenger resets the status
@action(MessageAction.BOMBER_SCAVENGER)
def scavenger(payload, msg_id):
scavenger_time = -60
scavenger = (SystemConfig.select()
.where(SystemConfig.key == 'SCAVENGER_TIME')
.first())
if scavenger and scavenger.value.isdigit():
scavenger_time = -int(scavenger.value)
update_auto_call_list = (
AutoCallList
.update(status=AutoListStatus.PENDING.value,
description='scavenger')
.where(
AutoCallList.status == AutoListStatus.PROCESSING.value,
AutoCallList.updated_at <
datetime.now() + timedelta(minutes=scavenger_time),
)
)
count = update_auto_call_list.execute()
logging.info('scavenger processed %s application', count)
    # Reset applications in the auto-call list that are stuck in MAILBOX status
mail_box_scavenger_time = -30
mail_box_scavenger = (SystemConfig.select()
.where(SystemConfig.key == 'MAIL_BOX_SCAVENGER_TIME')
.first())
if mail_box_scavenger and mail_box_scavenger.value.isdigit():
mail_box_scavenger_time = -int(mail_box_scavenger.value)
update_mail_box_call_list = (
AutoCallList.update(status=AutoListStatus.PENDING.value)
.where(AutoCallList.status == AutoListStatus.MAILBOX.value,
AutoCallList.updated_at <
datetime.now() + timedelta(minutes=mail_box_scavenger_time))
)
mail_box_count = update_mail_box_call_list.execute()
logging.info("scavenger update mail box %s", mail_box_count)
    # If no IVR callback is received within 30 minutes, reset the IVR status
update_auto_ivr = (
AutoIVR
.update(status=AutoIVRStatus.AVAILABLE.value)
.where(AutoIVR.status == AutoIVRStatus.PROCESSING.value,
AutoIVR.updated_at < datetime.now() + timedelta(minutes=-30)
)
)
ivr_result = update_auto_ivr.execute()
logging.info("scavenger update %s ivr"%ivr_result)
@action(MessageAction.BOMBER_CLEAR_OVERDUE_PTP)
def bomber_clear_overdue_ptp(payload, msg_id):
    # C1B, C2 and C3 have no predictive auto-call, so once their PTP is cleared they go back to outsourcing or AB_TEST
    # For C1B, C2 and C3 applications, switch back to manual follow-up once the promised repayment date has passed
update_overdue_ptp_ab = (
Application.update(
status=ApplicationStatus.AB_TEST.value,
).where(
fn.DATE(Application.promised_date) < datetime.today().date(),
Application.status == ApplicationStatus.PROCESSING.value,
Application.cycle << [Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value]
)
)
count1 = update_overdue_ptp_ab.execute()
logging.info('bomber overdue ptp for C1B C2 and C3 cleared: %s', count1)
now_and_yesterday = ((datetime.today() + timedelta(days=1)).date(),
datetime.today().date())
overdue_1a1b_cs_ptp = (CallActions
.select()
.where(fn.DATE(CallActions.promised_date)
.in_(now_and_yesterday),
CallActions.bomber_id == 72))
update_overdue_1a1b_cs_ptp = (
Application
.update(status=ApplicationStatus.UNCLAIMED.value)
.where(Application.status == ApplicationStatus.PROCESSING.value,
Application.cycle == Cycle.C1A.value,
Application.id.in_(overdue_1a1b_cs_ptp)))
logging.debug("bomber c1a c1b cs ptp: %s", update_overdue_1a1b_cs_ptp)
count2 = update_overdue_1a1b_cs_ptp.execute()
logging.info('bomber c1a c1b cs overdue ptp cleared: %s', count2)
update_overdue_ptp = (
Application
.update(
status=ApplicationStatus.UNCLAIMED.value,
).where(
fn.DATE(Application.promised_date) < datetime.today().date(),
Application.status == ApplicationStatus.PROCESSING.value,
Application.cycle == Cycle.C1A.value,
)
)
count = update_overdue_ptp.execute()
logging.info('bomber overdue ptp cleared: %s', count)
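# Daily collection report: per cycle it derives pending cases, cases that entered the
# predictive dialer, completion/connection counts, manual connections and per-agent
# averages, then writes one ReportCollection row per cycle. OperatedDict appears to
# support element-wise -, + and / over the (cycle, count) pairs, which is how the
# c1..c11 intermediates are combined below.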
@action(MessageAction.REPORT_BOMBER_COLLECTION)
def report_bomber_collection(payload, msg_id):
start_date = (ReportCollection
.select(fn.MAX(ReportCollection.apply_date))
.scalar())
now = datetime.now()
if start_date and str(start_date) == str(now)[:10]:
return
end_date = str(now + timedelta(days=1))[:10]
start_date = str(now)[:10]
dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))
all_overdue_loan_sql1 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
GROUP BY 1;
"""
s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()
d1 = OperatedDict(s_data1)
all_overdue_loan_sql2 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE DATE(ba.follow_up_date) > CURDATE()
AND ba.called_counts = 0
GROUP BY 1;
"""
s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()
d2 = OperatedDict(s_data2)
overdue_loans_entered_into_predict_call_system_sql = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE ba.called_counts >= 1
GROUP BY 1;
"""
s_data3 = readonly_db.execute_sql(
overdue_loans_entered_into_predict_call_system_sql).fetchall()
d3 = OperatedDict(s_data3)
loans_completed_sql = """
SELECT ba.cycle, COUNT(DISTINCT ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()
d4 = OperatedDict(s_data4)
connected_calls_automatic_sql = """
SELECT ba.cycle, COUNT(ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()
d5 = OperatedDict(s_data5)
connected_calls_manual_sql = """
SELECT bb.cycle, COUNT(bb.id)
FROM bomber.bombing_history bb
WHERE DATE(bb.created_at) = curdate()
AND (bb.bomber_id < 150 OR bb.bomber_id > 200)
GROUP BY bb.cycle;
"""
s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()
d6 = OperatedDict(s_data6)
    logging.info('Fetched raw report data directly from the database.')
c1 = d1 - d2
c2 = d3
c3 = c2 / c1
c4 = d4
c5 = c4 / c2
c6 = d5
c7 = c6 / c4
c8 = d6
c9 = OperatedDict(get_agent())
c10 = (c6 + c8) / c9
try:
c11 = average_call_duration_team(start_date, end_date)
except AttributeError:
c11 = {}
lst = []
for i in range(1, 5):
lst.append({
'apply_date': start_date,
'cycle': dct[i],
'all_overdue_loan': c1.get(i, 0),
'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),
'of_overdue_loans_entered_into_predict_call_system':
round(c3.get(i, 0) * 100, 1),
'loans_completed': c4.get(i, 0),
'of_completed_loans_in_predict_call_system':
round(c5.get(i, 0) * 100, 1),
'connected_calls_automatic': c6.get(i, 0),
'connected_calls_automatic_completed_loans':
round(c7.get(i, 0), 1),
'connected_calls_manual': c8.get(i, 0),
'agent': c9.get(i, 0),
'average_calls_agent': round(c10.get(i, 0), 1),
'average_call_duration_team': round(c11.get(i, 0), 1)
})
ReportCollection.insert_many(lst).execute()
logging.info('report_bomber_collection:Done!')
@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)
def bomber_auto_call_list_record(payload, msg_id):
"""记录一年的auto_call_list,删除前一天的数据,增加今天的数据"""
now = datetime.now()
if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):
date_sql = """
SELECT DATE(created_at) FROM auto_call_list_record
GROUP BY DATE(created_at) limit 1
"""
del_date = db.execute_sql(date_sql).fetchone()[0]
del_sql = """
DELETE FROM auto_call_list_record WHERE date(created_at) = %s
"""
db.execute_sql(del_sql, [del_date])
sql = """
INSERT INTO auto_call_list_record
SELECT * FROM auto_call_list
"""
db.execute_sql(sql)
logging.info("bomber_auto_call_list_record done")
@action(MessageAction.BOMBER_MANUAL_CALL_LIST)
def bomber_manual_call_list(payload, msg_id):
"""
    Manual dispatch of applications, driven by pending ManualCallList batches.
:param payload:
:param msg_id:
:return:
"""
batch_id = payload.get('batch_id')
if batch_id is None:
logging.warning('Invalid batch id')
return
query = (ManualCallList
.select()
.where(ManualCallList.batch_id == batch_id,
ManualCallList.status << ManualCallListStatus.available()))
if not query.exists():
logging.warning('Empty application id list')
return
for q in query:
application_ids = json.loads(q.application_ids or '[]')
# where
cycle = 0
where_list = [(Application.id << application_ids),
Application.latest_bomber_id == q.src_bomber_id]
src_params = json.loads(q.src_params or '{}')
if "cycle" in src_params:
where_list.append(Application.cycle == src_params['cycle'])
cycle = src_params['cycle']
if "status" in src_params:
where_list.append(Application.status == src_params['status'])
# update
update_dict = {'latest_bomber': q.dest_bomber_id}
dest_params = json.loads(q.dest_params or '{}')
if "cycle" in dest_params:
update_dict['cycle'] = dest_params['cycle']
cycle = dest_params['cycle']
if "status" in dest_params:
update_dict['status'] = dest_params['status']
with db.atomic():
try:
# update dispatch_app
if q.update_dispatch_app:
if q.dest_partner_id is None:
                        raise ValueError('disallowed operation')
(DispatchApp
.delete()
.where(DispatchApp.application_id.in_(application_ids))
.execute())
(DispatchApp
.insert_many([{
'application': i,
'partner': q.dest_partner_id,
'bomber': q.dest_bomber_id,
'status': DisAppStatus.NORMAL.value}
for i in application_ids])
.execute())
application_success_row = (
Application
.update(**update_dict)
.where(*where_list)
.execute()
)
if application_success_row == 0:
raise ValueError('Invalid parameter')
(ManualCallList
.update(
status=ManualCallListStatus.SUCCESS.value,
length=application_success_row)
.where(ManualCallList.id == q.id)
.execute())
out_and_in_record(
src_bomber_id=q.src_bomber_id,
application_ids=application_ids,
dest_partner_id=q.dest_partner_id,
dest_bomber_id=q.dest_bomber_id,
cycle=cycle
)
except Exception:
db.rollback()
(ManualCallList
.update(
status=ManualCallListStatus.FAILED.value,
length=0)
.where(ManualCallList.id == q.id)
.execute())
logging.error("PRINT BOMBER_MANUAL_CALL_LIST ERROR:\n%s",
traceback.format_exc())
continue
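# Build one DispatchAppHistory insert row from an application row plus its bill.
# dct is keyed by str(application_id); a minimal sketch of the expected shape,
# with illustrative values and field names taken from the lookups below:
#   dct = {"123": {"principal_paid": Decimal("0"),
#                  "late_fee": Decimal("0"),
#                  "late_fee_paid": Decimal("0")}}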
def lambda_result(item, dct):
a = str(item.application_id)
entry_principal_pending = (Decimal(item.amount or 0) -
dct[a]['principal_paid'])
entry_late_fee_pending = dct[a]['late_fee'] - dct[a]['late_fee_paid']
return {
'created_at': item.created_at,
'updated_at': item.updated_at,
'application': a,
'bomber_id': item.bomber_id,
'entry_at': item.entry_at,
'entry_overdue_days': item.entry_overdue_days,
'partner_id': item.partner_id,
'expected_out_time': item.expected_out_time,
'entry_principal_pending': entry_principal_pending,
'entry_late_fee_pending': entry_late_fee_pending
}
def out_and_in_record(**kwargs):
"""
    Case-exit and case-entry records for applications in the collection system.
"""
new_out_record(**kwargs)
new_in_record(**kwargs)
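# Close the source bomber's open dispatch records (stamp out_at) and, unless this is a
# month-end dispatch, clear ptp_bomber on the affected applications.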
def new_out_record(**kwargs):
if not kwargs['application_ids']:
return
(DispatchAppHistory
.update(out_at=fn.NOW())
.where(DispatchAppHistory.bomber_id == kwargs['src_bomber_id'],
DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True))
.execute())
    # For month-end dispatch, ptp_bomber is left untouched
if kwargs.get("month_dispatch"):
return
    # On exit, clear ptp_bomber for applications that had a PTP
try:
(Application.update(ptp_bomber=None)
.where(Application.id << kwargs["application_ids"])
.execute())
except Exception as e:
logging.error("new_out_record error:aids:%s,error:%s" %
(kwargs["application_ids"],str(e)))
def new_in_record(**kwargs):
cycle_period = {
1: '10',
2: '30',
3: '60',
4: '90'
}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(kwargs['dest_bomber_id'])).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(kwargs['dest_partner_id'])).alias('partner_id'),
(SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'
' t1.overdue_days) DAY)' % period))
.alias('expected_out_time'))
.where(Application.status != ApplicationStatus.REPAID.value,
Application.id << kwargs['application_ids']))
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
app_ids = [i.application_id for i in applications]
bill_list = BillService().bill_list(application_ids=app_ids)
bill_dict = {str(bill['application_id']): bill for bill in bill_list}
insert_args = list(map(partial(lambda_result,
dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
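# Close an old-loan record: mark it PAID when repaid, or FINISHED once both its end_date
# and any promised_date have passed; returns the application_id when the case still has
# to be moved out of the special queue by the caller.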
def end_old_application(old_app, paid=False):
if paid:
if old_app.status == OldLoanStatus.WAITING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return old_app.application_id
end_date = old_app.end_date
now = datetime.now()
if now >= max(end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
old_app.save()
return old_app.application_id
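# Scan tracked old-loan applications: those still more than 90 days overdue are started
# (dispatched to the dedicated OLD_APP_BOMBER); the rest are ended and taken out of that
# bomber's queue.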
@action(MessageAction.UPDATE_OLD_LOAN_APPLICATION)
def update_old_loan_application(payload, msg_id):
items = (Application
.select(Application, OldLoanApplication)
.join(OldLoanApplication,
JOIN_INNER,
on=(Application.id ==
OldLoanApplication.application_id).alias('old_app'))
.where(OldLoanApplication.status
.in_(OldLoanStatus.available())))
out_list = []
for application in items:
if application.overdue_days > 90:
if application.old_app.status == OldLoanStatus.WAITING.value:
start_old_application(application.old_app)
else:
out_list.append(application.old_app)
success_list = [end_old_application(item) for item in out_list]
app_ids = list(filter(None, success_list))
if app_ids:
bomber_id = SpecialBomber.OLD_APP_BOMBER.value
out_record(src_bomber_id=bomber_id, application_ids=app_ids)
def in_record(**kwargs):
"""
:param kwargs: dist_partner_id, dist_bomber_id,
expected_out_time, application_ids
:return:
"""
    # TODO: unify case-entry record logic
kwargs['dist_partner_id'] = kwargs.get('dist_partner_id') or 'null'
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(kwargs['dist_bomber_id'])).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(kwargs['dist_partner_id'])).alias('partner_id'),
R('"{}"'.format(kwargs['expected_out_time']))
.alias('expected_out_time'))
.where(Application.status != ApplicationStatus.REPAID.value,
Application.id << kwargs['application_ids']))
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
app_ids = [i.application_id for i in applications]
bill_list = BillService().bill_list(application_ids=app_ids)
bill_dict = {str(bill['application_id']): bill for bill in bill_list}
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
def out_record(**kwargs):
"""
:param kwargs: src_bomber_id, application_ids
:return:
"""
    # TODO: unify case-exit record logic
if not kwargs.get('application_ids'):
return
(DispatchAppHistory
.update(out_at=fn.NOW())
.where(DispatchAppHistory.bomber_id == kwargs['src_bomber_id'],
DispatchAppHistory.application << kwargs['application_ids'])
.execute())
    # On exit, clear ptp_bomber for applications that had a PTP
try:
(Application.update(ptp_bomber=None)
.where(Application.id << kwargs["application_ids"])
.execute())
except Exception as e:
logging.error("out_record error:aids:%s,error:%s" %
(kwargs["application_ids"], str(e)))
def start_old_application(old_app, cancel=False):
application_id = old_app.application_id
if cancel and (old_app.status == OldLoanStatus.PAID.value):
now = datetime.now()
if old_app.start_date is None:
            # Has not yet entered the 500 pool
old_app.status = OldLoanStatus.WAITING.value
elif now >= max(old_app.end_date, old_app.promised_date or now):
            # At cancellation the user has already left the 500 pool
old_app.status = OldLoanStatus.FINISHED.value
(DispatchAppHistory
.update(out_at=max(old_app.end_date,
old_app.promised_date or now))
.where(DispatchAppHistory.bomber_id == old_app.bomber_id,
DispatchAppHistory.application == application_id)
.execute())
else:
            # Still in the 500 pool
old_app.status = OldLoanStatus.PROCESSING.value
(DispatchAppHistory
.update(out_at=None)
.where(DispatchAppHistory.bomber_id == old_app.bomber_id,
DispatchAppHistory.application == application_id)
.execute())
old_app.save()
return
application = (
Application
.get_or_none(Application.id == application_id,
Application.status != ApplicationStatus.REPAID.value,
Application.overdue_days > 90,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) <
datetime.today().date())))
if not application:
logging.error("Can not set old application %s to start collecting",
application_id)
return
if old_app.status in OldLoanStatus.no_available():
logging.info("%s has finished or paid", old_app.application_id)
return
config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)
sp = config.get(SCI.OLD_APP_PERIOD,
SCI.OLD_APP_PERIOD.default_value)
old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value
old_app.status = OldLoanStatus.PROCESSING.value
old_app.bomber_id = old_app_bomber
old_app.start_date = datetime.now()
    # Only set end_date if it has not been set before
if not old_app.end_date:
old_app.end_date = datetime.now() + timedelta(days=sp)
old_app.save()
in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,
application_ids=[old_app.application_id],
expected_out_time=str(old_app.end_date))
@action(MessageAction.OLD_LOAN_APPLICATION)
def old_loan_application(payload, msg_id):
application_id = payload.get('application_id')
numbers = payload.get('numbers', [])
if not (application_id and numbers):
logging.error("empty application id: %s, or invalid numbers: %s",
application_id, numbers)
application = Application.get_or_none(Application.id == application_id)
if (application and
application.status == ApplicationStatus.REPAID.value):
logging.error("application %s has paid", application_id)
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
raise RuntimeError('Get golden eye user failed. {}'
.format(str(application_id)))
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
user_name = gold_app['id_name']
    # Fetch the bill type; installment (staging) bills are not linked to OldLoanApplication
try:
bill = BillService().bill_dict(application_id=application_id)
except Exception:
logging.error(
'application %s get bill info failed,old_loan_application',
application_id)
return
source_contacts = (Contact
.filter(Contact.user_id == user_id,
Contact.relationship ==
Relationship.APPLICANT.value,
Contact.source ==
ApplicantSource.NEW_APPLICANT.value))
source_contact_set = {i.number for i in source_contacts}
    # The following block only applies to non-installment loans
if bill["category"] != ApplicationType.CASH_LOAN_STAGING.value:
        # Fetch the existing NEW_APPLICANT numbers
old_app = OldLoanApplication.get_or_none(
OldLoanApplication.application_id == application_id,
OldLoanApplication.status.in_(OldLoanStatus.available())
)
if not old_app:
old_app = OldLoanApplication.create(application_id=application_id,
user_id=user_id,
numbers=','.join(numbers))
else:
_numbers = old_app.numbers.split(',')
            # Deduplicate and drop empty numbers
old_app.numbers = ','.join(set([nu for nu in (_numbers + numbers)
if nu]))
            # For applications already in collection, extend end_date by 7 days
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.end_date = old_app.end_date + timedelta(days=7)
old_app.save()
new_contact = set(numbers) - source_contact_set
insert_args = [{'user_id': user_id,
'name': user_name,
'number': i,
'relationship': Relationship.APPLICANT.value,
'source': ApplicantSource.NEW_APPLICANT.value,
'real_relationship': Relationship.APPLICANT.value
} for i in new_contact]
if insert_args:
Contact.insert_many(insert_args).execute()
if bill["category"] == ApplicationType.CASH_LOAN_STAGING.value:
return
start_old_application(old_app)
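# Thin read-only SQL helpers used by the report jobs below. run_one_sql expects a single
# aggregated amount in the first column and divides it by 1,000,000 before use; failures
# are logged and fall back to zero / empty results.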
def run_one_sql(sql):
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
result = cursor.fetchone()[0] / 1000000
except Exception as e:
        logging.info('run sql error: %s, %s' % (str(sql), str(e)))
result = Decimal(0)
return result
def run_member_sql(sql):
result = [0, 0]
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
sql_result = cursor.fetchone()
if sql_result:
result = sql_result
except Exception as e:
        logging.info('run sql error: %s, %s' % (str(sql), str(e)))
return result
def run_all_sql(sql):
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
result = cursor.fetchall()
except Exception as e:
        logging.info('run sql error: %s, %s' % (str(sql), str(e)))
result = []
return result
# Recover_rate for dpd1-3 (pending-collection dimension, deprecated)
def get_before_bomber(date_time):
begin_time = str(date_time - timedelta(days=7))
end_time = str(date_time)
    # Pending amount of applications already existing as of each Monday
old_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at<date_add('%s',interval 1 day)
and overdue_days in (2,3)
""" % (begin_time, begin_time)
old_data = run_one_sql(old_sql)
    # Amount of applications newly reaching dpd1 each day
new_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at> '%s'
and created_at<'%s'
and overdue_days=1;
""" % (begin_time, end_time)
new_data = run_one_sql(new_sql)
    # Amount rolling into dpd4 each day
dpd4_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>date_add('%s',interval 1 day)
and created_at< date_add('%s',interval 1 day)
and overdue_days=4;
""" % (begin_time, end_time)
dpd4_data = run_one_sql(dpd4_sql)
    # dpd2/dpd3 amount still outstanding as of the following Monday
dpd2_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at< date_add('%s',interval 1 day)
and overdue_days in (2,3)
""" % (end_time, end_time)
dpd2_data = run_one_sql(dpd2_sql)
all_money = old_data + new_data
repayment = all_money - dpd4_data - dpd2_data
pro = 0
if all_money:
pro = (repayment / all_money) * 100
RepaymentReport.create(
time=begin_time,
cycle=0,
all_money=all_money,
proportion=pro,
repayment=repayment
)
# Refresh the recover_rate report data weekly (pending-collection dimension)
@action(MessageAction.RECOVER_RATE_WEEK_MONEY)
def recover_rate_week_money(payload, msg_id):
    # Count how many times RECOVER_RATE_WEEK_MONEY has already run today
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= date.today(),
WorkerLog.action == 'RECOVER_RATE_WEEK_MONEY')
.first())
if worker_log.logs >= 5:
return
logging.info('start cal recover_rate_week_money')
date_time = date.today()
get_every_cycle_report(date_time)
# Recover_rate for dpd1-3 (entry-into-collection dimension)
def get_before_bomber_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
for i in range(2, 5):
money_sql = """
select
sum(bo1.principal_pending+bo1.late_fee_pending+
bo1.interest_pending) as dpd1_pending,
sum(bo2.principal_pending+bo2.late_fee_pending+
bo2.interest_pending) as dpd4_pending
from bill_java.overdue bo1
left join dashboard.application da
on bo1.application_id=da.id
left join bill_java.overdue bo2
on bo1.application_id=bo2.application_id
and bo2.overdue_days=%s and bo2.status = 1
where bo1.overdue_days=1
and bo1.status = 1
and bo1.which_day_overdue>='%s'
and bo1.which_day_overdue<'%s'
and da.is_first_loan = %s
and bo1.stage_num is null
""" % (i, begin_date, end_date, is_first_loan)
try:
cursor = readonly_db.get_cursor()
cursor.execute(money_sql)
money = cursor.fetchone()
all_money = money[0] / 1000000
dpd4_money = money[1] / 1000000
except Exception as e:
logging.info('get all_money error: %s' % str(e))
all_money = 0
dpd4_money = 0
repayment = all_money - dpd4_money
if begin_date == date_time - timedelta(days=1):
RepaymentReportInto.create(
time=begin_date,
cycle=0,
all_money=round(all_money, 3),
proportion='0',
repayment=round(repayment, 3),
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
else:
pro = '0'
if all_money:
pro = (repayment / all_money) * 100
pro = str(round(pro, 2))
RepaymentReportInto.update(
repayment=round(repayment, 3),
proportion=pro
).where(
RepaymentReportInto.time == begin_date,
RepaymentReportInto.cycle == 0,
RepaymentReportInto.is_first_loan == is_first_loan
).execute()
end_date = begin_date
begin_date = begin_date - timedelta(days=1)
# Recover_rate for C1A (entry-into-collection dimension)
def get_c1a_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=19)
repayment_sql = """
select
sum(b.principal_part+b.late_fee_part) as paid_amount,
cdt
from
(select
br.principal_part, br.late_fee_part,
date(cdt) as cdt, br.repay_at, br.application_id
from (
select ba.id, ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
if not repayment:
return
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C1A.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
for d in repayment:
repay = d[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == d[1],
RepaymentReportInto.cycle == Cycle.C1A.value,
RepaymentReportInto.is_first_loan == is_first_loan
).first()
if report:
report.repayment = round(repay, 3)
pro = (repay / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
# Recover_rate for C1B (entry-into-collection dimension)
def get_c1b_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,c1b_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
not_contain_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,c1b_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id and bd.partner_id=5)
) a
inner join bill_java.overdue o on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=22)
repayment_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount,et
from
(select br.principal_part, br.late_fee_part,
date(a.c1b_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c1b_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 2
group by 4, 5) b
group by 2;
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c1b_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c1b_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=5)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 2
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
not_contain_repay = run_all_sql(not_contain_repay_sql)
if not not_contain_repay and not repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C1B.value,
all_money=round(not_contain_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.NOT_CONTAIN.value
)
for repay in not_contain_repay:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C1B.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C1B.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C1B.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
# Recover_rate for C2 (entry-into-collection dimension)
def get_c2_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
not_contain_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=37)
repayment_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
not_contain_repay = run_all_sql(not_contain_repay_sql)
        if not not_contain_repay and not repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C2.value,
all_money=round(not_contain_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.NOT_CONTAIN.value
)
for repay in not_contain_repay:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C2.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C2.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C2.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
# Recover_rate for C3 (entry-into-collection dimension)
def get_c3_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id, ba.c3_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=30)
repayment_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c3_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c3_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = '%s'
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 4
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C3.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
if not repayment:
return
for repay in repayment:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.cycle == Cycle.C3.value,
RepaymentReportInto.is_first_loan == is_first_loan
).first()
if report:
report.repayment = repay_money
pro = 0
if report.all_money and int(report.all_money):
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
# Refresh the recover_rate report data daily (entry-into-collection dimension)
@action(MessageAction.RECOVER_RATE_WEEK_MONEY_INTO)
def recover_rate_week_money_into(payload, msg_id):
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= date.today(),
WorkerLog.action == 'RECOVER_RATE_WEEK_MONEY_INTO')
.first())
if worker_log and worker_log.logs >= 5:
return
date_time = date.today()
get_before_bomber_rate(date_time)
get_c1a_into_rate(date_time)
get_c1b_into_rate(date_time)
get_c2_into_rate(date_time)
get_c3_into_rate(date_time)
    # Flip report rows that have matured from not-ripe to ripe
ripe_days = {0: 3, 1: 7, 2: 20, 3: 30, 4: 30}
for i in range(0, 5):
repe_date = date.today() - timedelta(days=ripe_days[i])
(RepaymentReportInto
.update(ripe_ind=RipeInd.RIPE.value)
.where(RepaymentReportInto.time < repe_date,
RepaymentReportInto.cycle == i)
).execute()
# ----------------- Metrics in summary_bomber that existed in the original summary --------------------
# Seed the base SummaryBomber rows
def get_static_bomber(begin_date):
active_date = begin_date - timedelta(days=8)
bombers = (BomberR
.select(BomberR.id,
BomberR.role.alias('role'),
BomberR.last_active_at.alias('active'))
.where(BomberR.last_active_at > active_date,
                      BomberR.role << [1, 2, 4, 5, 6, 8, 9]))
summary = []
for bomber in bombers:
summary.append({
'time': begin_date,
'bomber_id': bomber.id,
'cycle': bomber.role.cycle,
'work_ind': 0
})
SummaryBomber.insert_many(summary).execute()
# Some metrics must be computed on the evening of the same day
@action(MessageAction.SUMMARY_CREATE)
def summary_create(payload, msg_id):
begin_date = date.today()
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= begin_date,
WorkerLog.action == 'SUMMARY_CREATE')
.first())
if worker_log and worker_log.logs >= 5:
return
get_static_bomber(begin_date)
# Mark the collectors who were active on the given day
def get_active_bomber(begin_date):
bombers = (BomberR
.select(BomberR.id)
.where(BomberR.last_active_at >= begin_date))
for bomber in bombers:
(SummaryBomber.update(work_ind=1)
.where(SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == bomber.id)
).execute()
# Daily call count and number of distinct applications called per collector
@time_logger
def get_call_and_made(end_date, begin_date, real_time_query=False):
call_sql = """
select
bomber_id,
count(case when relationship is not null then application_id end)
as 'call_cnt',
count(distinct case when relationship is not null then
application_id end) as 'call_case',
count(case when phone_status=4 then application_id end) as 'connect',
count(distinct case when phone_status=4 then application_id end)
as 'connect_case'
from (
select bomber_id,application_id,phone_status, cycle, relationship
from bomber.call_actions ba
where created_at>'%s' and created_at<'%s'
and type in (0, 1)
) a
group by 1
""" % (begin_date, end_date)
calls = run_all_sql(call_sql)
if real_time_query:
return calls
for call in calls:
bomber, call_cnt, case_made, connect_cnt, case_connect = call
(SummaryBomber.update(
case_made_cnt=case_made,
call_cnt=call_cnt,
call_connect_cnt=connect_cnt,
case_connect_cnt=case_connect)
.where(
SummaryBomber.bomber_id == bomber,
SummaryBomber.time == begin_date)
).execute()
return calls
# Daily count of claimed (assigned, pending) applications per collector
@time_logger
def get_claimed_cnt(end_date, begin_date, real_time_query=False):
table_date = begin_date - timedelta(days=30)
claimed_sql = """
SELECT
COUNT( `t1`.`application_id` ) AS cnt,
`t1`.`bomber_id` AS bomber_id
FROM
`dispatch_app_history` AS t1
WHERE
( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null )
AND ( `t1`.`bomber_id` != 1000 )
AND ( `t1`.`partner_id` IS null )
AND ( `t1`.`entry_at` > '%s' )
AND ( `t1`.`entry_at` < '%s' )
GROUP BY
`t1`.`bomber_id`
""" % (begin_date, table_date, end_date)
claimeds = run_all_sql(claimed_sql)
if real_time_query:
return claimeds
for claimed in claimeds:
cnt, bomber_id = claimed
(SummaryBomber.update(claimed_cnt=cnt)
.where(SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == bomber_id)
).execute()
return claimeds
# SMS send counts per collector
def get_sms_data(end_date, begin_date):
    all_sms = (ConnectHistoryR
               .select(ConnectHistoryR.operator.alias('bomber_id'),
                       fn.COUNT(ConnectHistoryR.application).alias('sms_send'))
               .where(ConnectHistoryR.created_at > begin_date,
                      ConnectHistoryR.created_at < end_date,
                      ConnectHistoryR.type.in_(ConnectType.sms()))
               .group_by(ConnectHistoryR.operator))
    for sms in all_sms:
        (SummaryBomber.update(sms_cnt=sms.sms_send)
         .where(SummaryBomber.time == begin_date,
                SummaryBomber.bomber_id == sms.bomber_id)
         ).execute()
    return all_sms
# PTP (promise-to-pay) amounts and counts per collector
@time_logger
def get_ptp_data(end_date, begin_date, real_query_time=False):
sql = """
SELECT
a.bomber_id,
sum( a.promised_amount ) AS ptp_amount,
count( application_id )
FROM
bomber.auto_call_actions a
LEFT JOIN bomber.bomber c ON a.bomber_id = c.id
WHERE
a.created_at >= '%s'
AND a.created_at < '%s'
AND a.promised_date != ''
GROUP BY 1
UNION
SELECT
a.bomber_id,
ifnull( sum( a.promised_amount ), 0 ) AS ptp_amount,
count( application_id )
FROM
bomber.bombing_history a
LEFT JOIN bomber.bomber c ON a.bomber_id = c.id
WHERE
bomber_id NOT BETWEEN 151
AND 177
AND bomber_id NOT BETWEEN 181
AND 183
AND bomber_id != 72
AND a.created_at >= '%s'
AND a.created_at < '%s'
AND a.promised_date != ''
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date)
ptp_datas = run_all_sql(sql)
if real_query_time:
return ptp_datas
result = {}
for ptp in ptp_datas:
bomber_id, amount, cnt = ptp
if bomber_id in result.keys():
result[bomber_id][0] += amount
result[bomber_id][1] += cnt
continue
result[bomber_id] = [amount, cnt]
for key, value in result.items():
(SummaryBomber
.update(
promised_cnt=value[1],
promised_amount=value[0]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
return ptp_datas
# Recovered amount and number of cleared applications per collector
@time_logger
def get_recover_amount(end_date, begin_date, real_time_query=False):
C1_sql = """
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,
count(distinct application_id)
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,4)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
""" % (begin_date, end_date)
C1_results = run_all_sql(C1_sql)
if not real_time_query:
for C1_result in C1_results:
bomber_id, amount, cnt = C1_result
(SummaryBomber.update(
cleared_cnt=cnt,
cleared_amount=amount
).where(
SummaryBomber.bomber_id == bomber_id,
SummaryBomber.time == begin_date
)).execute()
other_sql = """
select current_bomber_id,sum(pay_amount) as pay_amount,
count(distinct application_id)
from (
select application_id,current_bomber_id,pay_amount,repay_at
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (2,3,5,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
""" % (begin_date, end_date)
sql_results = run_all_sql(other_sql)
if not real_time_query:
for sql_result in sql_results:
bomber_id, amount, cnt = sql_result
(SummaryBomber.update(
cleared_cnt=cnt,
cleared_amount=amount
).where(
SummaryBomber.bomber_id == bomber_id,
SummaryBomber.time == begin_date
)).execute()
result = sql_results + C1_results
return result
# New data for the summary report (computed in steps; this task fills in the first batch of metrics)
@action(MessageAction.SUMMARY_NEW)
def summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= end_date,
WorkerLog.action == 'SUMMARY_NEW')
.first())
if worker_log and worker_log.logs >= 5:
return
get_active_bomber(begin_date)
get_call_and_made(end_date, begin_date)
get_claimed_cnt(end_date, begin_date)
get_sms_data(end_date, begin_date)
get_ptp_data(end_date, begin_date)
get_recover_amount(end_date, begin_date)
get_unfollowed(begin_date)
get_unfollowed_call(begin_date)
# ------------------------ Remaining summary_bomber metrics ----------------------
# New case count and pending amount per collector
def get_new_case_amount(begin_date, end_date):
all_case = (DispatchAppHistoryR
.select(fn.SUM(DispatchAppHistoryR.entry_late_fee_pending +
DispatchAppHistoryR.entry_principal_pending)
.alias('pending'),
DispatchAppHistoryR.bomber_id,
fn.COUNT(DispatchAppHistoryR.application).alias('cnt'))
.where(DispatchAppHistoryR.entry_at > begin_date,
DispatchAppHistoryR.entry_at < end_date,
DispatchAppHistoryR.partner_id.is_null(True))
.group_by(DispatchAppHistoryR.bomber_id))
for case in all_case:
SummaryBomber.update(
new_case_amount_sum=case.pending,
new_case_cnt=case.cnt
).where(
SummaryBomber.bomber_id == case.bomber_id,
SummaryBomber.time == begin_date
).execute()
return all_case
# KP (kept-promise) cleared counts per collector
def get_kp_cleared(begin_date, end_date):
auto_call_sql = """
SELECT
a.current_bomber_id, count( b.application_id )
FROM
(SELECT
current_bomber_id, principal_part, late_fee_part,
repay_at, application_id
FROM
bomber.repayment_log
WHERE
repay_at >= '%s'
AND repay_at < '%s'
GROUP BY 4, 5 ) a
LEFT JOIN (
SELECT
cycle, bomber_id, promised_amount, promised_date,
application_id, created_at
FROM
bomber.auto_call_actions
WHERE
created_at >= date_sub( '%s', INTERVAL 7 DAY )
AND created_at < '%s'
AND promised_date IS NOT NULL
) b ON a.current_bomber_id = b.bomber_id
AND a.application_id = b.application_id
AND date( a.repay_at ) <= date( b.promised_date )
AND date( a.repay_at ) >= date( b.created_at )
LEFT JOIN bomber.bomber c ON a.current_bomber_id = c.id
WHERE
b.promised_date >= '%s'
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date, begin_date)
auto_call_results = run_all_sql(auto_call_sql)
manual_sql = """
SELECT
a.current_bomber_id, count( b.application_id )
FROM
(SELECT
current_bomber_id, principal_part, late_fee_part,
repay_at, application_id, created_at
FROM
bomber.repayment_log
WHERE
repay_at >= '%s'
AND repay_at < '%s'
AND principal_part + late_fee_part > 0
GROUP BY 2, 5 ) a
LEFT JOIN (
SELECT
cycle, bomber_id, promised_amount, promised_date,
application_id, created_at
FROM
bomber.bombing_history
WHERE
created_at >= date_sub( '%s', INTERVAL 7 DAY )
AND created_at < '%s'
AND promised_date IS NOT NULL
) b ON a.current_bomber_id = b.bomber_id
AND a.application_id = b.application_id
AND date( a.repay_at ) <= date( b.promised_date )
AND date( a.repay_at ) >= date( b.created_at )
LEFT JOIN bomber.bomber c ON a.current_bomber_id = c.id
WHERE
b.promised_date >= '%s'
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date, begin_date)
manual_results = run_all_sql(manual_sql)
sql_result = auto_call_results + manual_results
result = {}
for data in sql_result:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
for key, value in result.items():
(SummaryBomber
.update(
KP_cleared_cnt=value
).where(
SummaryBomber.bomber_id == key,
SummaryBomber.time == begin_date)
).execute()
# Applications under an active PTP that day (denominator of the KP rate)
def get_kp_today(begin_date, end_date):
sql = """
select bomber_id, count(distinct application_id)
from(
SELECT bomber_id, application_id
FROM bomber.auto_call_actions a
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where a.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))
UNION
SELECT bomber_id, application_id
FROM bomber.bombing_history b
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where b.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))) result
GROUP BY 1
""" % (begin_date, end_date, begin_date, begin_date, end_date, begin_date)
kp_today = run_all_sql(sql)
for kp in kp_today:
(SummaryBomber.update(
KP_today_cnt=kp[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == kp[0]
)).execute()
# PTP due counts (applications due today and due the next day)
def get_ptp_cnt(begin_date, end_date):
today_due = []
for sql_date in (begin_date, end_date):
sql = """
select bomber_id,count(distinct application_id) as cnt from
        ( # auto-call PTPs; exclude applications whose PTP date was later renewed
select application_id,bomber_id,created_at
from bomber.auto_call_actions ba
            where promised_date ='%s' # filter out PTPs that were renewed manually
and not exists ( select 1 from bomber.bombing_history bb
where bb.application_id = ba.application_id
and bb.bomber_id = ba.bomber_id
and bb.created_at>ba.created_at
and bb.promised_date is not null
and bb.created_at < '%s')
            union # history records; exclude applications whose renewed PTP moved them out of today's list
select b.application_id,b.bomber_id,a.cdt
from bomber.bombing_history b
inner join (
select application_id,bomber_id,max(created_at) as cdt
from bomber.bombing_history bb
where bb.created_at>date_sub('%s',interval 7 day)
and bb.created_at<'%s'
and promised_date is not null
group by 1,2) a
on b.application_id=a.application_id
and b.bomber_id=a.bomber_id and a.cdt=b.created_at
where b.promised_date ='%s'
            union # PTPs made today that are due today
select b.application_id,b.bomber_id,b.created_at
from bomber.bombing_history b
where b.promised_date ='%s'
and b.created_at>'%s'
and b.created_at<date_add('%s',interval 1 day)
) a
where exists(select 1 from bomber.application ba
where ba.id=a.application_id
and ((ba.finished_at is null)
or (ba.finished_at > '%s')))
group by 1
""" % (sql_date, begin_date, begin_date, begin_date, sql_date,
sql_date, begin_date, begin_date, begin_date)
datas = run_all_sql(sql)
if sql_date == begin_date:
today_due = datas
for data in datas:
(SummaryBomber.update(
ptp_today_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0]
)).execute()
continue
nextday_due = datas
for data in datas:
(SummaryBomber.update(
ptp_next_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0]
)).execute()
return [today_due, nextday_due]
# PTP follow-up stats (PTPs due today / next day that actually got a call)
def get_ptp_call_cnt(begin_date, end_date):
today_followed = []
for sql_data in (begin_date, end_date):
sql = """
select b.bomber_id,count(distinct b.application_id) as cnt
from (
select a.* from
(
select application_id,bomber_id,created_at
from bomber.auto_call_actions ba
            where promised_date ='%s' # filter out PTPs that were renewed manually
and not exists (select 1 from bomber.bombing_history bb
where bb.application_id = ba.application_id
and bb.bomber_id = ba.bomber_id
and bb.created_at>ba.created_at
and bb.promised_date is not null
and bb.created_at < '%s')
            union # history records; exclude applications whose renewed PTP moved them out of today's list
select b.application_id,b.bomber_id,a.cdt
from bomber.bombing_history b
inner join (
select application_id,bomber_id,max(created_at) as cdt
from bomber.bombing_history bb
where bb.created_at>date_sub('%s',interval 7 day)
and bb.created_at<'%s'
and promised_date is not null
group by 1,2) a
on b.application_id=a.application_id
and b.bomber_id=a.bomber_id and a.cdt=b.created_at
where b.promised_date ='%s'
            union # PTPs made today that are due today
select b.application_id,b.bomber_id,b.created_at
from bomber.bombing_history b
where b.promised_date ='%s'
and b.created_at>'%s'
and b.created_at<date_add('%s',interval 1 day)
) a
where exists(select 1 from bomber.application ba
where ba.id=a.application_id
and ((ba.finished_at is null)
or (ba.finished_at > '%s')))
and exists(select 1 from bomber.call_actions bc
where a.application_id = bc.application_id
and a.bomber_id = bc.bomber_id
and bc.created_at>'%s'
and bc.created_at< date_add('%s',interval 1 day)
and bc.created_at>=a.created_at)
union
select a.* from
(
select application_id,bomber_id,created_at
from bomber.auto_call_actions ba
            where promised_date ='%s' # filter out PTPs that were renewed manually
and not exists ( select 1 from bomber.bombing_history bb
where bb.application_id = ba.application_id
and bb.bomber_id = ba.bomber_id
and bb.created_at>ba.created_at
and bb.promised_date is not null
and bb.created_at < '%s')
            union # history records; exclude applications whose renewed PTP moved them out of today's list
select b.application_id,b.bomber_id,a.cdt
from bomber.bombing_history b
inner join (
select application_id,bomber_id,max(created_at) as cdt
from bomber.bombing_history bb
where bb.created_at>date_sub('%s',interval 7 day)
and bb.created_at<'%s'
and promised_date is not null
group by 1,2) a
on b.application_id=a.application_id
and b.bomber_id=a.bomber_id and a.cdt=b.created_at
where b.promised_date ='%s'
            union # PTPs made today that are due today
select b.application_id,b.bomber_id,b.created_at
from bomber.bombing_history b
where b.promised_date ='%s'
and b.created_at>'%s'
and b.created_at<date_add('%s',interval 1 day)
) a
where exists(select 1 from bomber.application ba
where ba.id=a.application_id
and ba.finished_at > '%s'
and ba.finished_at< date_add('%s',interval 1 day))
) b
group by 1
""" % (sql_data, begin_date, begin_date, begin_date, sql_data,
sql_data, begin_date, begin_date, begin_date, begin_date,
begin_date, sql_data, begin_date, begin_date, begin_date,
sql_data, sql_data, begin_date, begin_date, begin_date,
begin_date)
datas = run_all_sql(sql)
if sql_data == begin_date:
today_followed = datas
for data in datas:
(SummaryBomber.update(
ptp_today_call_cnt=data[1]
).where(
SummaryBomber.bomber_id == data[0],
SummaryBomber.time == begin_date
)).execute()
continue
nextday_followed = datas
for data in datas:
(SummaryBomber.update(
ptp_next_call_cnt=data[1]
).where(
SummaryBomber.bomber_id == data[0],
SummaryBomber.time == begin_date
)).execute()
return [today_followed, nextday_followed]
# Repayment amount on new cases (only C2 and C3 have the notion of new-case repayment)
def get_new_case_cleared(begin_date, end_date):
sql = """
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c2_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c3_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
) a
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date,begin_date, end_date)
case_cleared_sums = run_all_sql(sql)
for clear in case_cleared_sums:
(SummaryBomber.update(
new_case_cleared_sum=clear[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == clear[0]
)).execute()
# Number of new cases worked (called) on the day they entered
@time_logger
def get_new_case_call(begin_date, end_date, real_query_time=False):
sql = """
SELECT
bd.bomber_id,
count( DISTINCT bd.application_id )
FROM
bomber.dispatch_app_history bd
INNER JOIN bomber.call_actions bc
ON bd.application_id = bc.application_id
AND bd.bomber_id = bc.bomber_id
AND date( bd.entry_at ) = date( bc.created_at )
WHERE
entry_at > '%s'
AND entry_at < '%s'
AND partner_id IS NULL
GROUP BY 1
""" % (begin_date, end_date)
new_case_calls = run_all_sql(sql)
if real_query_time:
return new_case_calls
for call in new_case_calls:
(SummaryBomber.update(
new_case_call_cnt=call[1]
).where(
SummaryBomber.bomber_id == call[0],
SummaryBomber.time == begin_date
)).execute()
return new_case_calls
# Get the average call duration of connected cases
@time_logger
def get_calltime_avg(begin_date, end_date, real_query_time=False):
autos_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction,
count( 1 ) AS auto_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date)
autos = run_all_sql(autos_sql)
manual_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
AND an.recording is not null
GROUP BY 1
""" % (begin_date, end_date, '5%', '3%')
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]][0] += data[1]
result[data[0]][1] += data[2]
continue
result[data[0]] = [data[1], data[2]]
if real_query_time:
return result
for key, value in result.items():
(SummaryBomber.update(
calltime_case_sum=value[0],
calltime_case_cnt=value[1],
calltime_case_avg=value[0] / value[1] if value[1] else 0
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
return result
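# Illustrative sketch (added for clarity, not part of the original flow): the
# merging loop above folds the auto-call and manual rows together per
# bomber_id. The hypothetical (bomber_id, duration, call_cnt) rows below show
# the behaviour; this helper is safe to remove.
def _example_merge_calltime_rows():
    rows = [(101, 30, 2), (101, 45, 3), (102, 10, 1)]
    merged = {}
    for bomber_id, duration, cnt in rows:
        if bomber_id in merged:
            merged[bomber_id][0] += duration
            merged[bomber_id][1] += cnt
        else:
            merged[bomber_id] = [duration, cnt]
    assert merged == {101: [75, 5], 102: [10, 1]}
    return merged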
# Get waiting-time (non-case call time) related data
def get_no_calltime_avg(begin_date, end_date):
manual_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND (an.status!='ANSWERED' or an.recording is null)
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date, '5%', '3%')
manuals = run_all_sql(manual_sql)
for data in manuals:
(SummaryBomber.update(
calltime_no_case_sum=data[1],
calltime_no_case_cnt=data[2],
calltime_no_case_avg=data[1] / data[2] if data[2] else 0
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0]
)).execute()
# Get the total call duration
@time_logger
def get_calltime_sum(begin_date, end_date, real_query_time=False):
autos_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date)
autos = run_all_sql(autos_sql)
manual_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date, '5%', '3%')
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
if real_query_time:
return result
for key, value in result.items():
(SummaryBomber.update(
calltime_sum=value
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
return result
# Cases not followed up on the day
def get_unfollowed(begin_date):
sql = """
SELECT
bomber_id,
count(1)
FROM
(
SELECT
bd.application_id,
date(bd.entry_at) AS entry_at,
bd.bomber_id,
date(bd.out_at) AS out_at
FROM
bomber.dispatch_app_history bd
WHERE
(
out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)
OR out_at IS NULL
)
AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)
AND partner_id IS NULL
AND NOT EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
bd.bomber_id = bc.bomber_id
AND bc.application_id = bd.application_id
AND bc.created_at < '%(begin_date)s'
)
) a
GROUP BY
1
""" % {'begin_date': begin_date}
data = run_all_sql(sql)
result = defaultdict(int)
for d in data:
result[d[0]] += d[1]
bomber_list = []
for key, value in result.items():
bomber_list.append(key)
(SummaryBomber.update(
unfollowed_cnt=SummaryBomber.new_case_cnt + value
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
# For the remaining bomber_ids, assign unfollowed_cnt directly from new_case_cnt
(SummaryBomber.update(
unfollowed_cnt=SummaryBomber.new_case_cnt
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id.not_in(bomber_list)
)).execute()
# Among the unfollowed cases, the number followed up on the day
def get_unfollowed_call(begin_date):
sql = """
SELECT
bomber_id,
count(1)
FROM
(
SELECT
bd.application_id,
date(bd.entry_at) AS entry_at,
bd.bomber_id,
date(bd.out_at) AS out_at
FROM
bomber.dispatch_app_history bd
WHERE
(
out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)
OR out_at IS NULL
)
AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)
AND partner_id IS NULL
AND NOT EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
bd.bomber_id = bc.bomber_id
AND bc.application_id = bd.application_id
AND bc.created_at < '%(begin_date)s'
)
) a
WHERE
EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
a.application_id = bc.application_id
AND a.bomber_id = bc.bomber_id
AND bc.created_at > '%(begin_date)s'
AND bc.created_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND bc.created_at >= a.entry_at
)
OR EXISTS (
SELECT
1
FROM
bomber.application ba
WHERE
ba.id = a.application_id
AND ba.finished_at > '%(begin_date)s'
AND ba.finished_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
)
GROUP BY
1
""" % {'begin_date': begin_date}
data = run_all_sql(sql)
result = defaultdict(int)
for d in data:
result[d[0]] += d[1]
bomber_list = []
for key, value in result.items():
bomber_list.append(key)
(SummaryBomber.update(
unfollowed_call_cnt=SummaryBomber.new_case_call_cnt + value
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
# For the remaining bomber_ids, assign unfollowed_call_cnt directly from new_case_call_cnt
update_sql = (SummaryBomber
.update(unfollowed_call_cnt=SummaryBomber.new_case_call_cnt)
.where(SummaryBomber.time == begin_date))
if bomber_list:
update_sql = update_sql.where(SummaryBomber.bomber_id
.not_in(bomber_list))
update_sql.execute()
return result
# Update the new summary data (computes the remaining part of summary_bomber)
@action(MessageAction.UPDATE_SUMMARY_NEW)
def update_summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= end_date,
WorkerLog.action == 'UPDATE_SUMMARY_NEW')
.first())
if worker_log and worker_log.logs >= 5:
return
get_new_case_amount(begin_date, end_date)
get_kp_cleared(begin_date, end_date)
get_kp_today(begin_date, end_date)
get_ptp_cnt(begin_date, end_date)
get_ptp_call_cnt(begin_date, end_date)
get_new_case_cleared(begin_date, end_date)
get_new_case_call(begin_date, end_date)
get_calltime_avg(begin_date, end_date)
get_no_calltime_avg(begin_date, end_date)
get_calltime_sum(begin_date, end_date)
# -------------------------------- cycle-level data --------------------------
def get_cycle_claimed(begin_date, end_date):
sql = """
select cycle,count(1)
from bomber.application where cycle in (1,2,3,4)
and (finished_at is null or (finished_at>'%s'))
and created_at>'2018-09-01'
group by 1
""" % begin_date
result = run_all_sql(sql)
return result
# Get the number and amount of new cases at the cycle level
@time_logger
def cycle_new_case(begin_date, end_date, real_time_query=False):
sql = """
SELECT
1 AS cycle,
count( ba.id ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.created_at ) = bo.which_day_overdue
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c1b_entry ) = bo.which_day_overdue
WHERE
c1b_entry > '%s'
AND c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c2_entry ) = bo.which_day_overdue
WHERE
c2_entry > '%s'
AND c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c3_entry ) = bo.which_day_overdue
WHERE
c3_entry > '%s'
AND c3_entry < '%s'
""" % (begin_date, end_date, begin_date, end_date,
begin_date, end_date, begin_date, end_date)
all_datas = run_all_sql(sql)
if real_time_query:
return all_datas
for data in all_datas:
(SummaryBomber.update(
new_case_amount_sum=data[2],
new_case_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0],
SummaryBomber.cycle == data[0]
)).execute()
return all_datas
# Number of new cases followed up on the day they entered (cycle level)
@time_logger
def get_cycle_new_case_call(begin_date, end_date, real_time_query=False):
sql = """
SELECT
1 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.created_at ) = date( bc.created_at )
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c1b_entry ) = date( bc.created_at )
WHERE
ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c2_entry ) = date( bc.created_at )
WHERE
ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c3_entry ) = date( bc.created_at )
WHERE
ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
""" % (begin_date, end_date, begin_date, end_date,
begin_date, end_date, begin_date, end_date)
cycle_datas = run_all_sql(sql)
if real_time_query:
return cycle_datas
for data in cycle_datas:
(SummaryBomber.update(
new_case_call_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.cycle == data[0],
SummaryBomber.bomber_id == data[0]
)).execute()
return cycle_datas
def get_cycle_new_case_cleared(begin_date, end_date):
sql = """
SELECT
'1' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.created_at ) = date( br.repay_at )
WHERE ba.created_at > '%s'
AND ba.created_at < '%s'
GROUP BY 1, 2 ) a
UNION
SELECT
'2' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
GROUP BY 1, 2) a
""" % (begin_date, end_date, begin_date, end_date)
cycle_cleared = run_all_sql(sql)
for i in cycle_cleared:
(SummaryBomber.update(
new_case_cleared_sum=i[2]
).where(
SummaryBomber.cycle == i[0],
SummaryBomber.bomber_id == i[0],
SummaryBomber.time == begin_date
)).execute()
def get_cycle_case_made_cnt(begin_date, end_date):
sql = """
select cycle,count(distinct application) from (
select distinct cycle,application from bomber.auto_call_list_record
where created_at >= '%s'
and created_at < '%s'
and called_counts <> 0
and cycle in (1,2,3,4)
union
select distinct cycle,application_id from bomber.call_actions
where created_at >= '%s'
and created_at < '%s'
and cycle in (1,2,3,4)
) c
group by 1
""" % (begin_date, end_date, begin_date, end_date)
case_made_datas = run_all_sql(sql)
for case_made_data in case_made_datas:
(SummaryBomber.update(
case_made_cnt=case_made_data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.cycle == case_made_data[0],
SummaryBomber.bomber_id == case_made_data[0]
)).execute()
# Get the data at the cycle dimension
@action(MessageAction.SUMMARY_NEW_CYCLE)
def summary_new_cycle(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= end_date,
WorkerLog.action == 'SUMMARY_NEW_CYCLE')
.first())
if worker_log and worker_log.logs >= 5:
return
cycle_datas = (SummaryBomber
.select(fn.SUM(SummaryBomber.new_case_amount_sum)
.alias('new_case_amount_sum'),
fn.SUM(SummaryBomber.new_case_cleared_sum)
.alias('new_case_cleared_sum'),
fn.SUM(SummaryBomber.case_made_cnt)
.alias('case_made_cnt'),
fn.SUM(SummaryBomber.case_connect_cnt)
.alias('case_connect_cnt'),
fn.SUM(SummaryBomber.promised_cnt)
.alias('promised_cnt'),
fn.SUM(SummaryBomber.promised_amount)
.alias('promised_amount'),
fn.SUM(SummaryBomber.cleared_cnt)
.alias('cleared_cnt'),
fn.SUM(SummaryBomber.cleared_amount)
.alias('cleared_amount'),
fn.SUM(SummaryBomber.new_case_cnt)
.alias('new_case_cnt'),
fn.SUM(SummaryBomber.new_case_call_cnt)
.alias('new_case_call_cnt'),
fn.SUM(SummaryBomber.unfollowed_cnt)
.alias('unfollowed_cnt'),
fn.SUM(SummaryBomber.unfollowed_call_cnt)
.alias('unfollowed_call_cnt'),
fn.SUM(SummaryBomber.call_cnt).alias('call_cnt'),
fn.SUM(SummaryBomber.sms_cnt).alias('sms_cnt'),
fn.SUM(SummaryBomber.call_connect_cnt)
.alias('call_connect_cnt'),
fn.SUM(SummaryBomber.ptp_today_cnt)
.alias('ptp_today_cnt'),
fn.SUM(SummaryBomber.ptp_today_call_cnt)
.alias('ptp_today_call_cnt'),
fn.SUM(SummaryBomber.ptp_next_cnt)
.alias('ptp_next_cnt'),
fn.SUM(SummaryBomber.ptp_next_call_cnt)
.alias('ptp_next_call_cnt'),
fn.SUM(SummaryBomber.KP_cleared_cnt)
.alias('KP_cleared_cnt'),
fn.SUM(SummaryBomber.KP_today_cnt)
.alias('KP_today_cnt'),
fn.SUM(SummaryBomber.work_ind).alias('work_ind'),
fn.SUM(SummaryBomber.calltime_sum)
.alias('calltime_sum'),
fn.SUM(SummaryBomber.calltime_case_sum)
.alias('calltime_case_sum'),
fn.SUM(SummaryBomber.calltime_case_cnt)
.alias('calltime_case_cnt'),
fn.SUM(SummaryBomber.calltime_no_case_sum)
.alias('calltime_no_case_sum'),
fn.SUM(SummaryBomber.calltime_no_case_cnt)
.alias('calltime_no_case_cnt'),
SummaryBomber.cycle.alias('cycle'))
.where(SummaryBomber.time == begin_date,
SummaryBomber.cycle << Cycle.values())
.group_by(SummaryBomber.cycle))
for cycle_data in cycle_datas:
SummaryBomber.create(
bomber_id=cycle_data.cycle,
time=begin_date,
cycle=cycle_data.cycle,
new_case_amount_sum=cycle_data.new_case_amount_sum, # new case amount (same as above)
new_case_cleared_sum=cycle_data.new_case_cleared_sum, # new case repayment (same as above)
new_case_cleard_rate=0,
case_made_cnt=cycle_data.case_made_cnt, # dialed case count
case_made_rate=0,
case_connect_cnt=cycle_data.case_connect_cnt, # connected case count
case_connect_rate=0,
promised_cnt=cycle_data.promised_cnt, # PTP case count
promised_amount=cycle_data.promised_amount, # PTP amount
cleared_cnt=cycle_data.cleared_cnt, # repaid case count
cleared_amount=cycle_data.cleared_amount, # repaid amount
new_case_cnt=cycle_data.new_case_cnt, # new case count (cycles 1 and 2 still to be computed)
new_case_call_cnt=cycle_data.new_case_call_cnt, # new case call count (same as above)
unfollowed_cnt=cycle_data.unfollowed_cnt,
unfollowed_call_cnt=cycle_data.unfollowed_call_cnt,
call_cnt=cycle_data.call_cnt, # number of calls made
sms_cnt=cycle_data.sms_cnt, # number of SMS sent
call_connect_cnt=cycle_data.call_connect_cnt, # number of connected calls
calltime_case_avg=0, # average call duration of connected cases (all computed below)
ptp_today_cnt=cycle_data.ptp_today_cnt, # PTP cases due today
ptp_today_call_cnt=cycle_data.ptp_today_call_cnt, # PTP cases due today that were followed up
ptp_next_cnt=cycle_data.ptp_next_cnt, # PTP cases due tomorrow
ptp_next_call_cnt=cycle_data.ptp_next_call_cnt, # PTP cases due tomorrow that were followed up
KP_cleared_cnt=cycle_data.KP_cleared_cnt, # KP repaid cases
KP_today_cnt=cycle_data.KP_today_cnt, # cases under PTP today
KP_cleared_rate=0,
work_ind=cycle_data.work_ind, # whether the bomber worked that day
calltime_sum=cycle_data.calltime_sum, # total call duration
calltime_case_sum=cycle_data.calltime_case_sum,
calltime_case_cnt=cycle_data.calltime_case_cnt,
calltime_no_case_sum=cycle_data.calltime_no_case_sum,
calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,
work_time_sum=cycle_data.work_time_sum # working time
)
cycle_claimed = get_cycle_claimed(begin_date, end_date)
for claimed in cycle_claimed:
(SummaryBomber.update(
claimed_cnt=claimed[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.cycle == claimed[0],
SummaryBomber.bomber_id == claimed[0]
)).execute()
# Get the number and amount of new cases
cycle_new_case(begin_date, end_date)
# Get the number of new cases followed up
get_cycle_new_case_call(begin_date, end_date)
# Get the repayment amount of new cases
get_cycle_new_case_cleared(begin_date, end_date)
# Adjust the cycle-level dialed case count (predictive auto-call dials are all counted as connected)
get_cycle_case_made_cnt(begin_date, end_date)
# Compute the derived metrics (the various rates)
all_datas = (SummaryBomber.filter(SummaryBomber.time == begin_date))
for data in all_datas:
cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum
if data.new_case_amount_sum else 0) * 100
data.new_case_cleard_rate = cl_rat
case_made_rate = (data.case_made_cnt / data.claimed_cnt
if data.claimed_cnt else 0) * 100
data.case_made_rate = case_made_rate
case_connect_rate = (data.case_connect_cnt / data.case_made_cnt
if data.case_made_cnt else 0) * 100
data.case_connect_rate = case_connect_rate
calltime_case_avg = (data.calltime_case_sum / data.calltime_case_cnt
if data.calltime_case_cnt else 0)
data.calltime_case_avg = calltime_case_avg
calltime_no_case_avg = (data.calltime_no_case_sum /
data.calltime_no_case_cnt
if data.calltime_no_case_cnt else 0)
data.calltime_no_case_avg = calltime_no_case_avg
KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt
if data.KP_today_cnt else 0) * 100
data.KP_cleared_rate = KP_cleared_rate
data.save()
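# Illustrative sketch of the rate formulas above, using made-up numbers and the
# same zero-denominator guard as the loop; safe to remove.
def _example_rate_calculation():
    cleared_sum, amount_sum = 250.0, 1000.0
    rate = (cleared_sum / amount_sum if amount_sum else 0) * 100
    assert rate == 25.0
    return rate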
@action(MessageAction.MODIFY_BILL)
def modify_bill(payload, msg_id):
application_id = payload.get('external_id')
principal_paid = Decimal(payload.get('principal_paid', 0))
late_fee = Decimal(payload.get('late_fee', 0))
late_fee_paid = Decimal(payload.get('late_fee_paid', 0))
overdue_days = payload.get('overdue_days')
sub_bill_id = payload.get('bill_sub_id')
partner_bill_id = payload.get('partner_bill_id')
if not application_id:
logging.warning('payload has no external_id. {}'.format(str(payload)))
return
if not overdue_days:
logging.info("application %s not overdue" % application_id)
return
item = (OldLoanApplication
.get_or_none(OldLoanApplication.application_id ==
application_id))
if item:
start_old_application(item, cancel=True)
overdue_bill = (OverdueBill.select()
.where(OverdueBill.external_id == application_id,
OverdueBill.sub_bill_id == sub_bill_id)
.first())
application = (Application.filter(Application.id == application_id)
.first())
if not overdue_bill:
if not application:
logging.info('application %s not in bomber, let it in bomber now',
application_id)
send_to_default_q(MessageAction.APPLICATION_BOMBER, {
'id': application_id,
'bill_sub_id': sub_bill_id
})
return
else:
application = (Application
.filter(Application.id == overdue_bill.collection_id)
.first())
with db.atomic():
application.status = ApplicationStatus.UNCLAIMED.value
application.finished_at = None
application.paid_at = None
application.save()
if overdue_bill:
overdue_bill.status = ApplicationStatus.UNCLAIMED.value
overdue_bill.finished_at = None
overdue_bill.save()
repayment = (RepaymentLog.update(no_active = 1)
.where(RepaymentLog.application == application.id,
RepaymentLog.partner_bill_id == partner_bill_id,
RepaymentLog.overdue_bill_id == overdue_bill.id))
else:
repayment = (RepaymentLog.update(no_active=1)
.where(RepaymentLog.application == application.id,
RepaymentLog.partner_bill_id == partner_bill_id))
repayment_num = repayment.execute()
logging.info("modify_bill no active repayment count:%s" % repayment_num)
if not application.latest_bomber_id:
return
bomber_id = application.latest_bomber_id
(DispatchAppHistory.update(
out_at=None,
out_overdue_days=overdue_days,
out_principal_pending=(application.amount - principal_paid),
out_late_fee_pending=(late_fee - late_fee_paid)
).where(
DispatchAppHistory.application == application.id,
DispatchAppHistory.bomber_id == bomber_id)).execute()
# Get the ids of bombers whose assignment changed
def get_change_bomber():
cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value, 8: Cycle.C3.value}
result = {}
bomber_logs = (BomberLog.select(BomberLog.bomber_id,
BomberLog.role_id,
BomberLog.operation,
Bomber.group_id)
.join(Bomber, JOIN_INNER,
on=BomberLog.bomber_id == Bomber.id)
.where(fn.DATE(BomberLog.created_at) == date.today(),
BomberLog.role_id << list(cycle_role_map.keys()), # C1B, C2, C3
BomberLog.operation << (0, 1), # 0 delete, 1 create, 3 modify
Bomber.instalment == 0) # bombers handling non-instalment cases
.dicts())
for b_log in bomber_logs:
cycle = cycle_role_map.get(b_log["role_id"])
group_id = b_log["group_id"]
if cycle in result:
if group_id not in result[cycle]:
result[cycle][group_id] = {
"cycle": cycle,
"del_ids": [],
"new_ids": []
}
else:
result[cycle] = {group_id: {
"cycle": cycle,
"del_ids": [],
"new_ids": []}
}
if b_log["operation"] == 0:
result[cycle][group_id]["del_ids"].append(b_log["bomber_id"])
# A non-empty result means there were personnel changes
if result:
bombers = (Bomber.select()
.where(Bomber.role.in_(list(cycle_role_map.keys())),
Bomber.is_del == 0,
Bomber.instalment == 0))
for b in bombers:
cycle_result = result.get(cycle_role_map[b.role_id], {})
role_result = cycle_result.get(b.group_id)
if not role_result:
continue
role_result["new_ids"].append(b.id)
resutl_list = []
for cycle, group_dict in result.items():
resutl_list.extend(list(group_dict.values()))
return resutl_list
return []
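# Illustrative sketch (hypothetical ids) of the structure returned by
# get_change_bomber(): one dict per cycle/group with the removed and the
# remaining bombers; safe to remove.
def _example_change_bomber_result():
    return [
        {"cycle": 2, "del_ids": [101], "new_ids": [102, 103]},
        {"cycle": 3, "del_ids": [], "new_ids": [201]},
    ]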
# Get all the applications
def get_total_application(cycle, del_ids, new_ids,
type=ApplicationType.CASH_LOAN.value):
bomber_list = del_ids + new_ids
all_apps = (Application.select(Application.id,
Application.latest_bomber_id.alias(
"latest_bomber_id"),
Application.promised_date,
Bomber.partner_id.alias("partner_id"))
.join(Bomber, JOIN_LEFT_OUTER,
Application.latest_bomber == Bomber.id)
.where(Application.cycle == cycle,
Application.status != ApplicationStatus.REPAID.value,
Application.latest_bomber_id << bomber_list,
Application.type == type)
.order_by(Application.id)
.dicts())
return all_apps
# Get the averages list, i.e. how many cases each bomber should receive
def get_average_number(app_nums, bomber_nums):
average = app_nums // bomber_nums
remainder = app_nums % bomber_nums
average_list = [average for i in range(bomber_nums)]
if remainder == 0:
return average_list
for i in range(remainder):
average_list[i] += 1
# Shuffle the result so the first few people do not always get the extra cases
random.shuffle(average_list)
return average_list
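# Illustrative usage sketch: splitting 10 cases across 3 bombers yields
# [4, 3, 3] in some shuffled order, so the totals always add up; safe to remove.
def _example_get_average_number():
    sizes = get_average_number(10, 3)
    assert sorted(sizes, reverse=True) == [4, 3, 3]
    assert sum(sizes) == 10
    return sizes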
# Group and count the applications
def classified_statistic_apps(apps):
result = {}
# Group the data by the application's bomber_id
for app in apps:
# Separate cases with a PTP from those without
latest_bomber_id = app["latest_bomber_id"]
if latest_bomber_id not in result:
result[latest_bomber_id] = {
"bid":latest_bomber_id,
"p_list": [],
"np_list": [],
"partner_id": app["partner_id"] if app["partner_id"] else "",
}
promised_date = app.get("promised_date")
if not promised_date or promised_date.date() < date.today():
result[latest_bomber_id]['np_list'].append(app["id"])
else:
result[latest_bomber_id]['p_list'].append(app["id"])
return result
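# Illustrative sketch with hypothetical rows shaped like the dicts returned by
# get_total_application(): case 1 has no PTP and case 2 has a PTP due tomorrow,
# so they land in np_list and p_list respectively; safe to remove.
def _example_classified_statistic_apps():
    apps = [
        {"id": 1, "latest_bomber_id": 9, "promised_date": None,
         "partner_id": None},
        {"id": 2, "latest_bomber_id": 9,
         "promised_date": datetime.now() + timedelta(days=1),
         "partner_id": 5},
    ]
    result = classified_statistic_apps(apps)
    assert result[9]["np_list"] == [1]
    assert result[9]["p_list"] == [2]
    return result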
# Collect the surplus cases and compute how many each person still needs
def get_surplus_application(new_ids, del_ids, average_nums, classified_apps):
surplus_apps = []
# If the id is in the deletion list, redistribute all of its cases
for del_id in del_ids:
del_res = classified_apps.get(del_id,{})
p_list = del_res.get("p_list", [])
np_list = del_res.get("np_list", [])
del_res["need_num"] = -(len(p_list) + len(np_list))
del_res["to_list"] = np_list + p_list
surplus_apps.extend(p_list)
surplus_apps.extend(np_list)
# Count each bomber's PTP and non-PTP cases and how many cases they still need
for index, bid in enumerate(new_ids):
average = average_nums[index]
bomber_app = classified_apps.get(bid)
if not bomber_app:
# Get the partner_id
bomber = (Bomber.select(Bomber.partner_id)
.where(Bomber.id == bid)
.first())
bomber_app = {
"bid": bid,
"p_list": [],
"p_num": 0,
"np_list": [],
"np_num": 0,
"need_num": average,
"partner_id": bomber.partner_id if bomber else ''
}
classified_apps[bid] = bomber_app
else:
p_num = len(bomber_app["p_list"])
np_num = len(bomber_app["np_list"])
# If the PTP cases already exceed the average, move all remaining non-PTP cases to the surplus list
if p_num > average:
bomber_app["need_num"] = - np_num
else:
bomber_app["need_num"] = average - (p_num + np_num)
bomber_app["p_num"] = p_num
bomber_app["np_num"] = np_num
# Put the extra cases into the surplus list
if bomber_app["need_num"] < 0:
# Shuffle the cases so the overdue days of dispatched cases stay as even as possible
random.shuffle(bomber_app["np_list"])
res_over = bomber_app["np_list"][:-bomber_app["need_num"]]
bomber_app["to_list"] = res_over
surplus_apps.extend(res_over)
# Sort by need_num
classified_apps_list = sorted(classified_apps.values(),
key=lambda x:x["need_num"],
reverse=True)
return surplus_apps, classified_apps_list
# Update the database and dispatch the cases
def update_applications(surplus_apps, classified_apps, cycle):
# Shuffle the surplus cases
random.shuffle(surplus_apps)
for app in classified_apps:
status = 0
try:
if app["need_num"] > 0:
from_list = surplus_apps[:app["need_num"]]
# Remove the consumed elements from surplus_apps
for i in from_list: surplus_apps.remove(i)
app["from_list"] = from_list
with db.atomic():
q = Application.update(
{Application.latest_bomber_id: app["bid"]}).where(
Application.id.in_(from_list))
q.execute()
# Record the case entering collection (in-record)
in_record_params = {
"dest_bomber_id": app["bid"],
"application_ids": from_list,
"dest_partner_id": app["partner_id"],
"cycle": cycle,
}
new_in_record(**in_record_params)
status = 1
elif app["need_num"] < 0:
# Record the case leaving collection (out-record)
out_record_params = {
"src_bomber_id": app["bid"],
"application_ids": app["to_list"]
}
new_out_record(**out_record_params)
status = 1
else:
status = 1
except Exception as e:
logging.error("分件异常,params:%s,error:%s"%(app,str(e)))
# Record the operation log
log_params = {
"bomber_id": app["bid"],
"form_ids": json.dumps(app.get("from_list", [])),
"to_ids": json.dumps(app.get("to_list", [])),
"need_num": app.get("need_num"),
"np_ids": json.dumps(app.get("np_list", [])),
"p_ids": json.dumps(app.get("p_list", [])),
"status": status
}
DispatchAppLogs.create(**log_params)
return classified_apps
# Dispatch instalment collection cases when personnel change
def get_instalment_change_bomber():
result ={}
bomber_logs = (BomberLog.select(BomberLog.bomber_id,
BomberLog.operation,
Bomber.instalment,
Bomber.group_id)
.join(Bomber, JOIN_INNER,
on=BomberLog.bomber_id == Bomber.id)
.where(fn.DATE(BomberLog.created_at) == date.today(),
BomberLog.operation << [0,1],
Bomber.instalment > 0)
.dicts())
for bl in bomber_logs:
cycle = bl["instalment"]
group_id = bl["group_id"]
if cycle not in result:
result[cycle] = {group_id: {
"cycle": cycle,
"del_ids": [],
"new_ids": []
}}
else:
if group_id not in result[cycle]:
result[cycle][group_id] = {
"cycle": cycle,
"del_ids": [],
"new_ids": []}
if bl["operation"] == 0:
result[cycle][group_id]["del_ids"].append(bl["bomber_id"])
if result:
instalments = list(result.keys())
bombers = (Bomber.select()
.where(Bomber.instalment << instalments,
Bomber.is_del == 0))
for b in bombers:
cycle_result = result.get(b.instalment, {})
group_result = cycle_result.get(b.group_id)
if not group_result:
continue
group_result["new_ids"].append(b.id)
result_list = []
for cycle,group_dict in result.items():
result_list.extend(list(group_dict.values()))
return result_list
return []
def instalment_update_applications(surplus_apps, classified_apps, cycle):
end = 0
for app in classified_apps:
if app["need_num"] <= 0:
continue
start = end
end = start + app["need_num"]
aids = surplus_apps[start:end]
app["from_list"] = aids
status = 0
with db.atomic():
q = (Application.update(last_bomber = Application.latest_bomber,
latest_bomber = app["bid"],
ptp_bomber = None)
.where(Application.id << aids)
.execute())
# In-record and out-record
record_param = {
"cycle": cycle,
"application_ids": aids,
"dest_bomber_id": app["bid"],
"dest_partner_id": app["partner_id"],
}
out_and_in_record_instalment(**record_param)
status = 1
# Record the operation log
log_params = {
"bomber_id": app["bid"],
"form_ids": json.dumps(app.get("from_list", [])),
"to_ids": json.dumps(app.get("to_list", [])),
"need_num": app.get("need_num"),
"np_ids": json.dumps(app.get("np_list", [])),
"p_ids": json.dumps(app.get("p_list", [])),
"status": status
}
DispatchAppLogs.create(**log_params)
return classified_apps
# Run the dispatch triggered by personnel changes
def change_bomber_dispatch_apps(change_bombers,
type=ApplicationType.CASH_LOAN.value):
if not change_bombers:
return
for bombers in change_bombers:
del_ids = bombers.get("del_ids", [])
new_ids = bombers.get("new_ids", [])
cycle = bombers.get("cycle")
if not all([new_ids, cycle]):
logging.info(
"获取需要分件的信息异常,bomber:%s,type:%s" % (bombers, type))
continue
# Get all the applications
apps = get_total_application(cycle, del_ids, new_ids, type)
if not apps:
logging.info(
"分件没有获取到对应的件,bomber:%s,type:%s" % (bombers, type))
continue
# Get the averages list
average_nums = get_average_number(len(apps), len(new_ids))
# Group and count the apps
classified_apps = classified_statistic_apps(apps)
# Compute how many cases each person needs and which cases are surplus
superlus_apps, classified_apps = get_surplus_application(new_ids,
del_ids,
average_nums,
classified_apps)
# Dispatch the cases and update the database
if type == ApplicationType.CASH_LOAN.value:
result = update_applications(superlus_apps, classified_apps, cycle)
elif type == ApplicationType.CASH_LOAN_STAGING.value:
result = instalment_update_applications(superlus_apps,
classified_apps,
cycle)
else:
logging.info("人员变动触发分件,unknown type:%s" % type)
logging.info("人员变动触发的分件:result:%s,type:%s" % (result, type))
# Dispatch cases when bomber personnel change
@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)
def bomber_dispatch_applications(payload, msg_id):
# Detect personnel changes from today's bomber_log records (deletions are logged there)
change_bombers = get_change_bomber()
instalment_change_bombers = get_instalment_change_bomber()
params = {ApplicationType.CASH_LOAN.value: change_bombers,
ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}
for type,bombers in params.items():
change_bomber_dispatch_apps(change_bombers=bombers,type=type)
@action(MessageAction.REPAIR_BOMBER)
def repair_bomber(payload, msg_id):
app_mobile = payload['app_mobile']
username = payload.get('user_name')
logging.info('start repair bomber, number: %s' % app_mobile)
# Get the EC the user filled in, check whether that EC number is under collection, and store the relationship
if 'mobile_no' in payload and payload['mobile_no']:
mobile = number_strip(str(payload['mobile_no']))[:64]
name = payload.get('mobile_name')
application = Application.filter(Application.user_mobile_no == mobile)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, mobile, username, name)
if 'tel_no' in payload and payload['tel_no']:
tel_no = number_strip(str(payload['tel_no']))[:64]
name = payload.get('tel_name')
application = Application.filter(Application.user_mobile_no == tel_no)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, tel_no, username, name)
def repair_contact(number, application, name):
# If the filled-in EC has been overdue before, add the number to contacts
application = application.first()
contact = (Contact
.filter(Contact.user_id == application.user_id,
Contact.number == number))
if not contact.exists():
Contact.create(
user_id=application.user_id,
name=name,
number=number,
relationship=Relationship.FAMILY.value,
source='repair ec',
real_relationship=Relationship.FAMILY.value
)
logging.info('add repair contact success, number: %s' % number)
def add_relationship(number, ec_number, username, name):
# Store the relationship
query = (TotalContact
.objects(src_number=str(number),
dest_number=ec_number,
source=20,
is_calc=False
)
.first())
if not query:
TotalContact(
src_number=str(number),
src_name=username,
dest_number=ec_number,
dest_name=name,
source=20).save()
logging.info('add relationship success, number: %s' % number)
# Get the time range to summarize
def get_summary_daily_time():
mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')
mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')
now_date = datetime.now()
now_date_time = now_date.time()
today_str = str(now_date.date())
if now_date_time < mid_time_t1.time():
yes_date = now_date - timedelta(days=1)
yes_date_str = str(yes_date.date())
begin_str = yes_date_str + ' 17:20:00'
end_str = today_str + ' 00:00:00'
elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():
begin_str = today_str + ' 00:00:00'
end_str = today_str + ' 12:40:00'
else:
begin_str = today_str + ' 12:40:00'
end_str = today_str + ' 17:20:00'
begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')
end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')
# Record which day the statistics belong to
summary_datetime = now_date-timedelta(minutes=30)
summary_date = summary_datetime.date()
return begin_time, end_time, summary_date
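# Illustrative sketch of the three reporting windows picked by
# get_summary_daily_time(), depending on when the job runs:
#   before 12:40   -> [yesterday 17:20, today 00:00)
#   12:40 - 17:20  -> [today 00:00, today 12:40)
#   after 17:20    -> [today 12:40, today 17:20)
# The helper below only checks the invariant and is safe to remove.
def _example_summary_daily_window():
    begin_time, end_time, summary_date = get_summary_daily_time()
    assert begin_time < end_time
    return begin_time, end_time, summary_date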
# Refresh the day's data at 12:40, 17:20 and shortly after midnight
@action(MessageAction.SUMMARY_DAILY)
def summary_daily_data(payload, msg_id):
begin_time, end_time, summary_date = get_summary_daily_time()
call_actions = (CallActionsR.select(CallActionsR.id,
CallActionsR.bomber_id,
CallActionsR.application_id,
CallActionsR.promised_date,
CallActionsR.cycle,
CallActionsR.name,
CallActionsR.number)
.where(CallActionsR.created_at >= begin_time,
CallActionsR.created_at < end_time,
CallActionsR.type << (0,1)))
summary_dailys = {}
for call in call_actions:
if call.bomber_id not in summary_dailys:
summary_dailys[call.bomber_id] = {'ptp_cnt': 0,
'call_cnt': 0,
'cycle': call.cycle,
'repayment': 0,
'bomber_id': call.bomber_id,
'summary_date':str(summary_date)}
# For C2/C3, a PTP case produces an extra record without number and name
if call.name and call.number:
summary_dailys[call.bomber_id]['call_cnt'] += 1
if call.promised_date:
summary_dailys[call.bomber_id]['ptp_cnt'] += 1
# Get the repayment info
C1_sql = """
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,a.cycle
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,2,4,5)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
""" % (begin_time, end_time)
C1_repayment = run_all_sql(C1_sql)
other_sql = """
select current_bomber_id,sum(pay_amount) as pay_amount,cycle
from (
select application_id,current_bomber_id,pay_amount,repay_at,cycle
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at,
br.cycle
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (3,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
""" % (begin_time, end_time)
other_repayment = run_all_sql(other_sql)
all_repayment = C1_repayment + other_repayment
for res in all_repayment:
bomber_id,pay_amount,cycle = res
if bomber_id in summary_dailys:
summary_dailys[bomber_id]['repayment'] += pay_amount
else:
summary_dailys[bomber_id] = {'ptp_cnt': 0,
'call_cnt': 0,
'cycle': cycle,
'repayment': pay_amount,
'bomber_id': bomber_id,
'summary_date': str(summary_date)
}
insert_values = list(summary_dailys.values())
if insert_values:
SummaryDaily.insert_many(insert_values).execute()
# Get all unfinished cases of this cycle
def get_cycle_all_no_paid_app(cycle, type=None):
apps = (Application
.select(Application.id,
Application.latest_bomber_id,
Application.ptp_bomber,
Application.promised_date,
Application.cycle)
.where(Application.cycle == cycle,
Application.status != ApplicationStatus.REPAID.value,
Application.type == type)
.dicts())
dis_app_ids = [a['id'] for a in apps]
# Update the status of these cases in dispatch_app
with db.atomic():
for idx in range(0, len(dis_app_ids), 1000):
ids = dis_app_ids[idx:idx + 1000]
q = (DispatchApp.update(status = DisAppStatus.ABNORMAL.value)
.where(DispatchApp.application << ids)
.execute())
return apps
# Organize the apps by bomber_id
def get_app_logs(apps):
app_logs = {}
all_np_apps = []
all_p_apps = []
for a in apps:
latest_bomber = a["latest_bomber"]
# when latest_bomber is empty on the collection case, fall back to the cycle value,
latest_bomber = a["cycle"] if not latest_bomber else latest_bomber
if latest_bomber in app_logs:
app_logs[latest_bomber]["to_ids"].append(a["id"])
else:
app_logs[latest_bomber] = {"bomber_id": latest_bomber,
"to_ids": [a["id"]],
"np_ids": [],
"p_ids": []}
if (a["promised_date"] and
a["promised_date"].date() >= datetime.now().date()):
app_logs[latest_bomber]["p_ids"].append(a["id"])
all_p_apps.append(a)
else:
app_logs[latest_bomber]["np_ids"].append(a["id"])
all_np_apps.append(a)
return app_logs, all_np_apps, all_p_apps
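# Illustrative sketch with hypothetical rows: case 2 has no latest_bomber, so it
# is grouped under its cycle value (2) as noted in the comment above; case 1 has
# no PTP and goes to np_ids. Safe to remove.
def _example_get_app_logs():
    apps = [
        {"id": 1, "latest_bomber": 7, "cycle": 2, "promised_date": None},
        {"id": 2, "latest_bomber": None, "cycle": 2,
         "promised_date": datetime.now() + timedelta(days=1)},
    ]
    app_logs, all_np_apps, all_p_apps = get_app_logs(apps)
    assert app_logs[7]["np_ids"] == [1]
    assert app_logs[2]["p_ids"] == [2]
    return app_logs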
# Dispatch cases to outsourced (partner) staff at month end
def month_dispatch_app_out_partner(cycle,apps,app_logs,np_apps):
# Shuffle the cases
apps = list(apps)
np_apps = list(np_apps)
random.shuffle(np_apps)
apps_len = len(apps)
np_apps_len = len(np_apps)
end = 0
all_app_precentage = 0
# Get all the partners of this cycle
partners = (Partner.select()
.where(Partner.cycle == cycle,
Partner.status == PartnerStatus.NORMAL.value))
for p in partners:
all_app_precentage += p.app_percentage
for partner in partners:
# Get the partner's bombers
bombers = (Bomber.select()
.where(Bomber.partner == partner.id,
Bomber.is_del == 0,
Bomber.status != BomberStatus.OUTER_LEADER.value))
bids = {b.id:b for b in bombers}
if len(bids) == 0:
logging.info("cycle:%s,partner:%s,no bomber"%(cycle, partner.id))
continue
start = end
if np_apps_len >= int(apps_len * all_app_precentage):
end = start + int(apps_len * partner.app_percentage)
else:
end = (start +
int(np_apps_len * partner.app_percentage / all_app_precentage))
# All cases that this partner team should receive
partner_app = np_apps[start:end]
dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)
# Remaining cases go to in-house staff
np_apps = np_apps[end:]
return np_apps
# Dispatch to in-house staff
def month_dispatch_app_inner(cycle,np_apps,app_logs,p_apps):
sys_cycle = {1: 'AB_TEST_C1A',
2: 'AB_TEST_C1B',
3: 'AB_TEST_C2',
4: 'AB_TEST_C3'}
# Get the in-house staff
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = (Bomber.select().where(Bomber.id << sys_values,
Bomber.is_del == 0))
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
bombers = bombers.where(Bomber.instalment == 0)
bids = {b.id:b for b in bombers}
# C1A cases without a PTP go into the auto-call queue
if cycle == Cycle.C1A.value:
np_ids = [a["id"] for a in np_apps]
# Update the cases without a PTP
np = (Application
.update(status = ApplicationStatus.PROCESSING.value,
ptp_bomber = None,
latest_bomber = None)
.where(Application.id << np_ids)
.execute())
bomber_app_logs = app_logs.get(cycle, {})
# At month-end dispatch, cases sent to auto call also need in-record and out-record entries
out_param = {
"application_ids": bomber_app_logs.get("to_ids", []),
"month_dispatch": 1,
"src_bomber_id": cycle,
}
new_out_record(**out_param)
in_param = {
"cycle": cycle,
"application_ids": np_ids,
"dest_bomber_id": cycle
}
new_in_record(**in_param)
bomber_app_logs["need_num"] = len(np_apps)
bomber_app_logs["form_ids"] = np_ids
bomber_app_logs["status"] = 1
else:
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)
dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)
# Dispatch the cases to bombers
def dispatch_apps_to_bomber(cycle,apps,bids,app_logs,out_partner=True,
type=ApplicationType.CASH_LOAN.value):
apps = list(apps)
random.shuffle(apps)
# Compute how many cases each person should get
bids_list = list(bids.keys())
if len(bids_list) <= 0:
logging.info("get_dispatch_app_to_bomber no bids")
return
average_num = get_average_number(len(apps), len(bids_list))
bomber_end = 0
with db.atomic():
for index, bid in enumerate(bids_list):
current_bomber = bids.get(bid)
bomber_app_logs = app_logs.get(bid, {})
bomber_start = bomber_end
bomber_end = bomber_start + average_num[index]
bomber_apps = apps[bomber_start:bomber_end]
from_p, from_np, from_ids,status = [], [], [], 0
# Split the bomber's cases into those with a PTP and those without
for ba in bomber_apps:
promised_date = ba.get("promised_date")
from_ids.append(ba["id"])
if promised_date and promised_date.date() >= date.today():
from_p.append(ba["id"])
else:
from_np.append(ba["id"])
app_status = ApplicationStatus.AB_TEST.value
# In-house C1A cases with a PTP need a special status
if (cycle == Cycle.C1A.value and not out_partner
and type == ApplicationType.CASH_LOAN.value):
app_status = ApplicationStatus.PROCESSING.value
if from_p:
p = (Application
.update(ptp_bomber=bid,
latest_bomber=bid,
status=app_status)
.where(Application.id << from_p)
.execute())
p_ids = bomber_app_logs.get("p_ids", []) + from_p
bomber_app_logs["p_ids"] = p_ids
if from_np:
np = (Application
.update(latest_bomber=bid,
ptp_bomber=None,
status=ApplicationStatus.AB_TEST.value)
.where(Application.id << from_np)
.execute())
np_ids = bomber_app_logs.get("np_ids", []) + from_np
bomber_app_logs["np_ids"] = np_ids
in_param = {"cycle": cycle,
"dest_partner_id": current_bomber.partner_id,
"application_ids": from_ids,
"dest_bomber_id": bid,
}
if type == ApplicationType.CASH_LOAN.value:
out_param = {"src_bomber_id": bid,
"application_ids": bomber_app_logs.get("to_ids",[]),
"month_dispatch":1
}
# Out-record
new_out_record(**out_param)
# In-record
new_in_record(**in_param)
else:
out_and_in_record_instalment(**in_param)
bomber_app_logs["status"] = 1
need_num = bomber_app_logs.get("need_num", 0) + average_num[index]
bomber_app_logs["need_num"] = need_num
all_form_ids = bomber_app_logs.get("form_ids", []) + from_ids
bomber_app_logs["form_ids"] = all_form_ids
# For in-house dispatch, skip the steps below
if not out_partner:
continue
# Cases dispatched to partners must be recorded in dispatch_app: delete the old records, then insert the new ones
try:
(DispatchApp.delete()
.where(DispatchApp.application.in_(from_ids))
.execute())
dispatch_ins = [{"application": id,
"partner": current_bomber.partner_id,
"bomber": bid,
"status": DisAppStatus.NORMAL.value,
} for id in from_ids]
(DispatchApp.insert_many(dispatch_ins).execute())
except Exception as e:
logging.info(
"month_disapp_error error:%s,bid:%s,from_ids:%s" %
(str(e), bid, from_ids))
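# Illustrative sketch of the slicing pattern used above: the shuffled apps are
# cut into contiguous slices whose sizes come from get_average_number(), so
# every case is handed to exactly one bomber. The ids are made up; safe to remove.
def _example_dispatch_slicing():
    apps = list(range(7))
    sizes = get_average_number(len(apps), 3)
    slices, end = [], 0
    for size in sizes:
        start, end = end, end + size
        slices.append(apps[start:end])
    assert sum(len(s) for s in slices) == len(apps)
    return slices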
# Compute each case's overdue days and update its cycle accordingly
def calc_instalment_apps_cycle():
cycle_list = [Cycle.C2.value, Cycle.C3.value]
for cycle in cycle_list:
apps = (ApplicationR.select(ApplicationR.id,
ApplicationR.cycle,
ApplicationR.overdue_days.alias("ods"),
ApplicationR.latest_bomber,
OverdueBillR.status,
OverdueBillR.overdue_days.alias("oods"))
.join(OverdueBillR, JOIN_LEFT_OUTER,
on=ApplicationR.id == OverdueBillR.collection_id)
.where(ApplicationR.cycle == cycle,
ApplicationR.type ==
ApplicationType.CASH_LOAN_STAGING.value,
ApplicationR.status != ApplicationStatus.REPAID.value)
.dicts())
# Compute the real overdue_days of each collection case
lower_apps = {}
for app in apps:
if app["status"] == ApplicationStatus.REPAID.value:
continue
aid = app["id"]
if aid in lower_apps:
lower_apps[aid]["ods"] = max(app["oods"], app["ods"])
else:
lower_apps[aid] = {
"id": aid,
"cycle": cycle,
"ods": app["oods"],
}
# Check whether each app's overdue days still match its current cycle
for aid,app in lower_apps.items():
new_cycle = get_cycle_by_overdue_days(app["ods"])
if new_cycle != cycle:
update_param = {"cycle":new_cycle,
"overdue_days":app["ods"]}
entry_time = calc_entry_time(app["ods"])
update_param.update(entry_time)
# Update the collection case
(Application.update(**update_param)
.where(Application.id == aid)
.execute())
# After a cycle downgrade, update the following entry times based on overdue days
def calc_entry_time(overdue_days):
app_entry_time = {}
overdue_entry = {
"dpd1_entry": [1, 3],
"C1A_entry": [4, 10],
"C1B_entry": [11, 30],
"C2_entry": [31, 60],
"C3_entry": [61, 90]
}
for key,value in overdue_entry.items():
if value[0] <= overdue_days <= value[1]:
app_entry_time[key] = datetime.now()
else:
app_entry_time[key] = None
return app_entry_time
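# Illustrative check: 15 overdue days falls in the C1B band (11-30), so only
# C1B_entry receives a timestamp and the other entry fields are cleared; safe to remove.
def _example_calc_entry_time():
    entry = calc_entry_time(15)
    assert entry["C1B_entry"] is not None
    assert entry["C1A_entry"] is None and entry["C2_entry"] is None
    return entry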
# Instalment case dispatch
def instalment_month_dispatch_app():
sys_cycle = {1: 'AB_TEST_C1A',
2: 'AB_TEST_C1B',
3: 'AB_TEST_C2',
4: 'AB_TEST_C3'}
# Downgrade cycles first
calc_instalment_apps_cycle()
instalment_cycle_list = Cycle.values()[:4]
for cycle in instalment_cycle_list:
apps = get_cycle_all_no_paid_app(cycle,
ApplicationType.CASH_LOAN_STAGING.value)
if not apps:
logging.info("instalment_month_dispatch no get apps,cycle:%s"%cycle)
continue
app_logs, all_np_apps, all_p_apps = get_app_logs(apps)
# Get the staff to dispatch to
if cycle == Cycle.C1A.value:
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = (Bomber.select().where(Bomber.id << sys_values,
Bomber.is_del == 0))
else:
bombers = (Bomber.select().where(Bomber.is_del == 0,
Bomber.instalment == cycle))
bids = {b.id:b for b in bombers}
if not bids:
logging.info("instalment_month_dispatch no bomber,cycle:%s"%cycle)
continue
dispatch_apps_to_bomber(cycle = cycle,
apps = all_p_apps,
bids = bids,
app_logs = app_logs,
out_partner = False,
type = ApplicationType.CASH_LOAN_STAGING.value)
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
dispatch_apps_to_bomber(cycle=cycle,
apps=all_np_apps,
bids=bids,
app_logs=app_logs,
out_partner=False,
type=ApplicationType.CASH_LOAN_STAGING.value)
else:
# Cases without a PTP still need an in-record entry
np_ids = [a["id"] for a in all_np_apps]
np = (Application.update(status=ApplicationStatus.UNCLAIMED.value,
ptp_bomber=None,
latest_bomber=None)
.where(Application.id << np_ids,
Application.status != ApplicationStatus.REPAID.value)
.execute())
in_param = {
"cycle": cycle,
"application_ids": np_ids,
"dest_bomber_id": cycle
}
out_and_in_record_instalment(**in_param)
# Cases downgraded to a lower cycle are also recorded in the history
try:
dispatch_apps_logs = []
for bid,app in app_logs.items():
alg = {
"bomber_id": bid,
"need_num": -len(app.get("to_ids", [])),
"form_ids": json.dumps(app.get("form_ids", [])),
"to_ids": json.dumps(app.get("to_ids", [])),
"np_ids": json.dumps(app.get("np_ids", [])),
"p_ids": json.dumps(app.get("p_ids", [])),
"status": 1
}
if bid in bids:
alg["need_num"] = app.get("need_num", 0)
dispatch_apps_logs.append(alg)
if dispatch_apps_logs:
DispatchAppLogs.insert_many(dispatch_apps_logs).execute()
except Exception as e:
logging.info(
"instalment_dispatch_app_month log error.cycle:%s,error:%s" % (
cycle, str(e)))
# Redistribute all cases at the end of each month
@action(MessageAction.MONTH_DISPATCH_APP)
def month_dispatch_app(payload, msg_id):
# Check whether today is the 1st of the month
if datetime.today().day != 1:
logging.info("今天不是1号,不能执行分期件")
return
cycle_list = [Cycle.C1A.value,
Cycle.C1B.value,
Cycle.C2.value,
Cycle.C3.value]
with db.atomic():
for cycle in cycle_list:
apps = get_cycle_all_no_paid_app(cycle,
ApplicationType.CASH_LOAN.value)
if not apps:
logging.info("month_dispatch_app not get apps.cycle:%s"%cycle)
continue
app_logs, all_np_apps, all_p_apps = get_app_logs(apps)
np_apps = month_dispatch_app_out_partner(cycle=cycle,
apps=apps,
app_logs=app_logs,
np_apps = all_np_apps)
if not np_apps and not all_p_apps:
logging.info("month_dispatch_app not get inner apps.cycle:%s",
cycle)
continue
month_dispatch_app_inner(cycle,np_apps,app_logs,all_p_apps)
# Record the dispatch logs in the table
try:
dispatch_apps_logs = []
for bid,app in app_logs.items():
alg = {
"bomber_id": bid,
"need_num": app.get("need_num",0),
"form_ids": json.dumps(app.get("form_ids", [])),
"to_ids": json.dumps(app.get("to_ids", [])),
"np_ids": json.dumps(app.get("np_ids", [])),
"p_ids": json.dumps(app.get("p_ids", [])),
"status": 1
}
dispatch_apps_logs.append(alg)
for idx in range(0, len(dispatch_apps_logs), 10):
DispatchAppLogs.insert_many(
dispatch_apps_logs[idx:idx + 10]).execute()
except Exception as e:
logging.error(
"insert dispatch_log error:%s,cycle:%s"%(str(e),cycle))
try:
instalment_month_dispatch_app()
except Exception as e:
logging.info("instalment_month_dispatch_error:%s"%str(e))
# Daily scheduled summary of collection case information
@action(MessageAction.SUMMARY_BOMBER_OVERDUE)
def summary_bomber_overdue_everyday(payload, msg_id):
cycle_list = Cycle.values()
which_day = date.today()
# Get the unfinished orders of each cycle
for cycle in cycle_list:
apps = (ApplicationR.select(ApplicationR.id,
ApplicationR.cycle,
ApplicationR.ptp_bomber,
ApplicationR.overdue_days,
ApplicationR.promised_date,
ApplicationR.follow_up_date,
ApplicationR.external_id,
OverdueBillR.status,
OverdueBillR.periods,
OverdueBillR.sub_bill_id)
.join(OverdueBillR, JOIN_LEFT_OUTER,
on = ApplicationR.id == OverdueBillR.collection_id)
.where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.no_active == 0,
ApplicationR.cycle == cycle)
.dicts())
bomber_overdue_list = []
for app in apps:
status = app.get("status")
if status == ApplicationStatus.REPAID.value:
continue
ptp_bomber = app.get("ptp_bomber")
promised_date = app.get("promised_date")
follow_up_date = app.get("follow_up_date")
if not promised_date or promised_date.date() < date.today():
ptp_bomber = promised_date = None
if not follow_up_date or follow_up_date.date() < date.today():
follow_up_date = None
overdue_dict = {
"collection_id": app.get("id"),
"external_id": app.get("external_id"),
"sub_bill_id": app.get("sub_bill_id"),
"periods": app.get("periods"),
"cycle": app.get("cycle") if app.get("cycle") else cycle,
"ptp_bomber": ptp_bomber,
"promised_date": promised_date,
"follow_up_date": follow_up_date,
"which_day": which_day,
"overdue_days": app.get("overdue_days")
}
bomber_overdue_list.append(overdue_dict)
try:
if bomber_overdue_list:
with db.atomic():
for index in range(0, len(bomber_overdue_list), 1000):
insert_list = bomber_overdue_list[index: index+1000]
BomberOverdue.insert_many(insert_list).execute()
except Exception as e:
logging.info(
"summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s"%(
cycle,str(which_day),str(e)))
# Per-minute summary of each bomber's open PTP case count
@action(MessageAction.BOMBER_PTP_REAL_TIME_SUMMARY)
def bomber_ptp_real_time_summary(payload, msg_id):
ptp_switch_number = 200
sys_ptp_switch = (SystemConfig.select()
.where(SystemConfig.key == 'PTP_SWITCH_NUMBER')
.first())
if sys_ptp_switch and sys_ptp_switch.value.isdigit():
ptp_switch_number = int(sys_ptp_switch.value)
today = datetime.today().date()
ptp_apps = (ApplicationR.select(fn.COUNT(ApplicationR.id).alias('ptp_cnt'),
ApplicationR.latest_bomber)
.where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.cycle < Cycle.C2.value,
ApplicationR.promised_date >= today,
ApplicationR.latest_bomber.is_null(False))
.group_by(ApplicationR.latest_bomber))
bomber_ptps = (BomberPtp.select(BomberPtp.bomber_id))
bomber_ptp_bids = [b.bomber_id for b in bomber_ptps]
insert_result = []
for app in ptp_apps:
ptp_switch = BomberCallSwitch.ON.value
if app.ptp_cnt >= ptp_switch_number:
ptp_switch = BomberCallSwitch.OFF.value
params = {"bomber_id": app.latest_bomber_id,
"ptp_cnt": app.ptp_cnt,
"ptp_switch": ptp_switch,
"auto_ext": app.latest_bomber.auto_ext}
if app.latest_bomber_id in bomber_ptp_bids:
try:
q = (BomberPtp.update(**params)
.where(BomberPtp.bomber_id==app.latest_bomber_id)
.execute())
except Exception as e:
logging.error("ptp_reil_time_summary_error:%s,data,bid:%s" % (
str(e),params,app.latest_bomber_id))
else:
insert_result.append(params)
if insert_result:
BomberPtp.insert_many(insert_result).execute()
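# Illustrative sketch of the switch rule applied in the loop above: once a
# bomber holds ptp_switch_number (default 200) or more open PTP cases, the
# auto-call switch is turned off. The counts below are made up; safe to remove.
def _example_ptp_switch_rule():
    ptp_switch_number = 200
    for cnt, expected in [(150, BomberCallSwitch.ON.value),
                          (230, BomberCallSwitch.OFF.value)]:
        switch = BomberCallSwitch.ON.value
        if cnt >= ptp_switch_number:
            switch = BomberCallSwitch.OFF.value
        assert switch == expected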
# At 10:00, 14:00 and 16:30 each day, block auto calls for bombers with PTPs due today until they have followed those cases up
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)
def today_ptp_auto_call_switch(payload, msg_id):
today = datetime.today().date()
next_day = today + timedelta(days=1)
# Get the bombers who have PTP cases due today
apps = (ApplicationR.select(ApplicationR.latest_bomber)
.where(ApplicationR.promised_date < next_day,
ApplicationR.promised_date >= today,
ApplicationR.promised_date.is_null(False),
ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.cycle < Cycle.C2.value,
ApplicationR.latest_bomber.is_null(False))
.group_by(ApplicationR.latest_bomber))
bids = [a.latest_bomber_id for a in apps]
if not bids:
return
q = (BomberPtp.update(today_switch=BomberCallSwitch.OFF.value)
.where(BomberPtp.auto_ext.is_null(False),
BomberPtp.bomber_id << bids)
.execute())
# Reset the bombers' auto-call switch at 8:00 every morning
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)
def update_today_switch_every_day(payload, msg_id):
q = (BomberPtp.update(today_switch=BomberCallSwitch.ON.value)
.where(BomberPtp.auto_ext.is_null(False))
.execute())
# Notify bomber when a user changes their phone number
@action(MessageAction.USER_UPDATE_PHONE)
def user_change_phone(payload, msg_id):
user_id = payload.get("user_id")
new_mobile_no = payload.get("new_mobile_no")
if not all([user_id, new_mobile_no]):
logging.info("用户修改电话,没有获取到用户id获这用户手机号")
return
source = 'applicant updated number'
contacts = (Contact.select()
.where(Contact.user_id == int(user_id)))
if not contacts.exists():
logging.info("用户在contact中没有记录")
return
new_contact = contacts.where(Contact.number == new_mobile_no,
Contact.source == source)
if new_contact.exists():
logging.info("用户手机号已存在")
return
contact = contacts.order_by(-Contact.created_at).first()
Contact.create(user_id=contact.user_id,
name=contact.name,
number = new_mobile_no,
source = source,
relationship = Relationship.APPLICANT.value,
real_relationship = Relationship.APPLICANT.value)
fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query = Application.update(overdue_days=overdue_days).where(Application\n .status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.\n overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)\n updated_rows_count = query.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count)\n try:\n calc_overdue_days_over_instalment()\n except Exception as e:\n logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))\n apps = Application.filter(Application.status << [ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value], Application.overdue_days > 95, \n Application.promised_date.is_null(True) | (fn.DATE(Application.\n promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {\n 'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n\n\n<mask token>\n\n\ndef calc_overdue_days_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n today_now_time = datetime.now()\n month_first_day = today_now_time.replace(day=1, hour=1, minute=30,\n second=0, microsecond=0)\n for status in sub_bill_status_list:\n query = OverdueBill.update(overdue_days=overdue_days).where(\n OverdueBill.status == status, OverdueBill.overdue_days <= 95)\n updated_rows_count = query.execute()\n logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %\n (updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.\n created_at, OverdueBill.collection_id, OverdueBill.overdue_days\n ).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.\n collection_id == Application.id).where(Application.status ==\n status, Application.type == ApplicationType.CASH_LOAN_STAGING.value\n )\n app_update = {}\n for ob in overdue_bills:\n if (ob.status == ApplicationStatus.REPAID.value and ob.\n created_at < month_first_day):\n continue\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)\ndef automatic_escalation(payload, msg_id):\n app_ids = payload.get('application_list', [])\n if not app_ids:\n return\n apps = Application.select().where(Application.id.in_(app_ids), \n Application.status != ApplicationStatus.REPAID.value)\n for a in apps:\n new_cycle = application_entry_different_calculations(a)\n if a.overdue_days < 90:\n logging.info(\n 'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'\n .format(a.id, new_cycle, a.cycle, a.overdue_days))\n if new_cycle > a.cycle:\n with db.atomic():\n if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle\n .C1B.value):\n bomber_id = (a.latest_bomber_id 
if a.latest_bomber_id else\n a.cycle)\n DispatchAppHistory.update(out_at=datetime.now(),\n out_overdue_days=a.overdue_days).where(\n DispatchAppHistory.application == a.id, \n DispatchAppHistory.bomber_id == bomber_id).execute()\n Escalation.create(application=a.id, type=EscalationType.\n AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,\n current_cycle=a.cycle, escalate_to=new_cycle,\n current_bomber_id=a.latest_bomber)\n dis_app_update = DispatchApp.update(status=DisAppStatus.\n ABNORMAL.value).where(DispatchApp.application == a.id)\n dis_app_update.execute()\n a.cycle = new_cycle\n a.last_bomber = a.latest_bomber\n a.status = ApplicationStatus.UNCLAIMED.value\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n if new_cycle == Cycle.C1B.value:\n a.C1B_entry = datetime.now()\n elif new_cycle == Cycle.C2.value:\n a.C2_entry = datetime.now()\n elif new_cycle == Cycle.C3.value:\n a.C3_entry = datetime.now()\n a.save()\n logging.info('automatic escalation done')\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY)\ndef cron_summary(payload, msg_id):\n \"\"\"已废弃\"\"\"\n employees = Bomber.select(Bomber, Role).join(Role)\n summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,\n 'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,\n 'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':\n 0, 'sms_sent': 0} for i in employees}\n now_date = date.today()\n cal_date = now_date - timedelta(days=1)\n claimed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('claimed')).where(fn.DATE(Application.\n claimed_at) == cal_date, Application.status << [ApplicationStatus.\n PROCESSING.value, ApplicationStatus.REPAID.value], Application.\n latest_bomber.is_null(False)).group_by(Application.latest_bomber)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False)).group_by(\n Application.latest_bomber)\n completed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('completed')).where(Application.\n latest_bombing_time.is_null(False), fn.DATE(Application.\n latest_bombing_time) == cal_date, Application.latest_bomber.is_null\n (False)).group_by(Application.latest_bomber)\n escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(\n Escalation.id).alias('escalated')).where(fn.DATE(Escalation.\n created_at) == cal_date, Escalation.type == EscalationType.\n AUTOMATIC.value, Escalation.current_bomber.is_null(False), \n Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation\n .current_bomber)\n transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).\n alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==\n cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(\n Transfer.operator)\n promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(\n BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.\n created_at) == cal_date, BombingHistory.result == BombingResult.\n HAS_PROGRESS.value).group_by(BombingHistory.bomber)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed 
== True,\n RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.\n current_bomber)\n calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id\n ).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==\n cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)\n calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.\n record_id).alias('calls_connected')).where(fn.DATE(CallLog.\n time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==\n '1').group_by(CallLog.user_id)\n sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(\n ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_\n (ConnectType.sms()), ConnectHistory.created_at >= cal_date, \n ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)\n for i in claimed:\n summary[i.latest_bomber_id]['claimed'] += i.claimed\n for i in completed:\n summary[i.latest_bomber_id]['completed'] += i.completed\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n for i in escalated:\n summary[i.current_bomber_id]['escalated'] += i.escalated\n for i in transferred:\n summary[i.operator_id]['transferred'] += i.transferred\n for i in promised:\n summary[i.bomber_id]['promised'] += i.promised\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in calls_made:\n summary[int(i.user_id)]['calls_made'] += i.calls_made\n for i in calls_connected:\n summary[int(i.user_id)]['calls_connected'] += i.calls_connected\n for i in sms_sent:\n summary[i.operator_id]['sms_sent'] += i.sms_sent\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'claimed': data['claimed'], 'completed': data['completed'],\n 'cleared': data['cleared'], 'escalated': data['escalated'],\n 'transferred': data['transferred'], 'promised': data['promised'\n ], 'amount_recovered': data['amount_recovered'], 'calls_made':\n data['calls_made'], 'calls_connected': data['calls_connected'],\n 'sms_sent': data['sms_sent'], 'date': cal_date})\n if insert_args:\n Summary.insert_many(insert_args).execute()\n cycle_args = []\n escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(\n Escalation.id).alias('escalated_in')).where(Escalation.status ==\n ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==\n cal_date).group_by(Escalation.escalate_to)\n for i in escalated_in:\n cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.\n escalated_in, 'date': cal_date})\n amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM\n (RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)\n for i in amount_recovered_total:\n amount_recovered_total = i.principal_part + i.late_fee_part\n cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':\n amount_recovered_total, 'date': cal_date})\n if cycle_args:\n Summary.insert_many(cycle_args).execute()\n logging.info('cal summary done')\n send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY2)\ndef cron_summary2(payload, msg_id):\n \"\"\"已废弃,定时任务还在执行,具体情况待确定\"\"\"\n cal_date = date.today() - timedelta(days=1)\n employees = Bomber.select(Bomber, Role).join(Role)\n auto_call_actions = 
AutoCallActions.select(AutoCallActions.bomber,\n AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')\n ).where(fn.DATE(AutoCallActions.created_at) == cal_date)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.\n is_null(False), RepaymentLog.is_bombed == True)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False))\n auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,\n AutoCallActions.result)\n amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)\n cleared = cleared.group_by(Application.latest_bomber)\n summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,\n 'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered': \n 0} for e in employees}\n for a in auto_call_actions:\n summary[a.bomber_id]['answered_calls'] += a.count\n if a.result == AutoCallResult.PTP.value:\n summary[a.bomber_id]['ptp'] += a.count\n if a.result == AutoCallResult.FOLLOW_UP.value:\n summary[a.bomber_id]['follow_up'] += a.count\n if a.result == AutoCallResult.NOT_USEFUL.value:\n summary[a.bomber_id]['not_useful'] += a.count\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'answered_calls': data['answered_calls'], 'ptp': data['ptp'],\n 'follow_up': data['follow_up'], 'not_useful': data['not_useful'\n ], 'cleared': data['cleared'], 'amount_recovered': str(data[\n 'amount_recovered']), 'date': cal_date})\n if insert_args:\n Summary2.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_DISCOUNT_APPROVED)\ndef bomber_discount_approved(payload, msg_id):\n app_id = payload['id']\n msg_type = payload['msg_type']\n discount_to = payload['discount_to']\n effective_to = payload['effective_to']\n application = Application.filter(Application.id == app_id).first()\n if not application:\n logging.error(\n 'discount approved msg send failed application %s not found',\n app_id)\n return\n template = Template.select(Template.text, Template.app).where(Template.\n type == ConnectType.AUTO_SMS.value, Template.id << Template.\n get_auto_sms_tpl(msg_type), Template.app == application.app).first()\n if not template:\n logging.error('discount approved msg send failed template %s not found'\n , msg_type)\n return\n promised_date = None\n if application.promised_date:\n promised_date = application.promised_date.strftime('%d-%m-%Y')\n tpl_data = {'user_name': application.user_name, 'due_days': application\n .overdue_days, 'app_name': application.app, 'phone': application.\n user_mobile_no, 'cs_number': cs_number_conf.get(application.app,\n '02150202889'), 'promised_date': promised_date, 'discount_to':\n discount_to, 'effective_to': effective_to}\n content = template.text.format(**tpl_data)\n data_list = [{'receiver': '62' + application.user_mobile_no, 'content':\n content, 'title': ''}]\n 
send_sms(data_list, msg_type, application.app)\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST)\ndef bomber_auto_call_list(payload, msg_id):\n with db.atomic():\n bomber_dispatch_app()\n dispatch_instalment_app()\n dis_apps = DispatchApp.select(DispatchApp.application).where(\n DispatchApp.status == DisAppStatus.NORMAL.value)\n c1_apps = Application.select(Application.id, Application.cycle,\n Application.follow_up_date, Application.called_times).where(Application\n .status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.\n AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.\n is_rejected == False, Application.promised_date.is_null(True) | (fn\n .DATE(Application.promised_date) < datetime.today().date())).order_by(\n Application.overdue_days, Application.apply_at)\n dis_apps_ids = [da.application_id for da in dis_apps]\n insert_args = []\n for a in c1_apps:\n if a.id in dis_apps_ids:\n continue\n insert_args.append({'application': a.id, 'cycle': a.cycle,\n 'follow_up_date': a.follow_up_date, 'called_times': 1 if a.\n called_times else 0, 'description': 'init'})\n if not insert_args:\n logging.error('no application need auto call')\n with db.atomic():\n AutoCallList.delete().execute()\n for idx in range(0, len(insert_args), 100):\n AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()\n for idx in range(0, len(insert_args), 100):\n application_list = [i['application'] for i in insert_args[idx:idx +\n 100]]\n send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {\n 'application_list': application_list})\n logging.info('bomber generate auto call list finished')\n send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})\n\n\nclass ChangeBomberTool(object):\n\n @staticmethod\n def in_record(bomber_id, ids, bd):\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.\n alias('application_id'), R(str(bomber_id)).alias('bomber_id'),\n fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(\n 'expected_out_time'), Application.overdue_days.alias(\n 'entry_overdue_days')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << ids)\n Application.update(latest_bomber=bomber_id).where(Application.id.\n in_(ids)).execute()\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bd),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n @staticmethod\n def out_record(a, bd):\n _id = str(a.id)\n DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a\n .overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]\n .get('principal_paid')), out_late_fee_pending=bd[_id].get(\n 'late_fee') - bd[_id].get('late_fee_paid')).where(\n DispatchAppHistory.application == a.id, DispatchAppHistory.\n bomber_id == a.latest_bomber_id).execute()\n a.last_bomber = a.latest_bomber\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n a.save()\n\n @staticmethod\n def classify(l, b):\n if len(l) == 1:\n return l[0]\n _l = filter(lambda x: x['bomber'] != b, l)\n return min(_l, key=lambda x: len(x['ids']))\n\n\n<mask token>\n\n\ndef bomber_dispatch_app():\n try:\n c1a_dispatch_app()\n except Exception as e:\n logging.error('c1a_dispatch_app error:%s' % str(e))\n cycle = {(1): 10, (2): 30, (3): 60, (4): 
90}\n apps = Application.select().where(fn.DATE(Application.C2_entry) == date\n .today(), Application.type == ApplicationType.CASH_LOAN.value)\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C2.value)\n apps_ids = [a.id for a in apps]\n dispatch_inserts = []\n start_index = 0\n apps_length = len(apps_ids)\n logging.warning('apps length %s' % str(apps_length))\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)\n gen = CycleIter([b.id for b in bombers])\n existing_list = []\n end_index = start_index + int(apps_length * p.app_percentage)\n logging.info('partner length %s' % str(end_index))\n if not apps_ids[start_index:end_index]:\n continue\n bills = BillService().bill_list(application_ids=apps_ids[\n start_index:end_index])\n bill_dict = {bill['application_id']: bill for bill in bills}\n for a_id in apps_ids[start_index:end_index]:\n bomber = average_gen(gen, existing_list)\n q = DispatchApp.delete().where(DispatchApp.application == a_id\n ).execute()\n dispatch_inserts.append({'application': a_id, 'bomber': bomber,\n 'partner': p.id})\n application = Application.select().where(Application.id == a_id\n ).first()\n application.latest_bomber = bomber\n application.status = ApplicationStatus.AB_TEST.value\n application.ptp_bomber = None\n application.save()\n day_next_cycle = cycle.get(application.cycle\n ) - application.overdue_days\n DispatchAppHistory.create(application=a_id, partner_id=p.id,\n bomber_id=bomber, entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - Decimal(\n bill_dict[a_id].get('principal_paid')),\n entry_late_fee_pending=Decimal(bill_dict[a_id].get(\n 'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')\n ), expected_out_time=date.today() + timedelta(days=\n day_next_cycle))\n start_index = end_index\n with db.atomic():\n for idx in range(0, len(dispatch_inserts), 100):\n DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n config = SystemConfig.prefetch(SCI.AB_TEST_C2)\n c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)\n c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)\n c2 = apps_ids[start_index:]\n if c2:\n bills = BillService().bill_list(application_ids=c2)\n else:\n bills = []\n bill_dict = {bill['application_id']: bill for bill in bills}\n logging.info('c2 AB_test length: %s' % str(c2))\n gen = CycleIter(c2_bomber)\n existing_list = []\n for c in c2:\n bomber = average_gen(gen, existing_list)\n application = Application.filter(Application.id == c).first()\n application.status = ApplicationStatus.AB_TEST.value\n application.latest_bomber = bomber\n application.ptp_bomber = None\n application.save()\n day_next_cycle = 46 - application.overdue_days\n DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at\n =datetime.now(), entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - bill_dict[c].get(\n 'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(\n 'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),\n expected_out_time=date.today() + timedelta(days=day_next_cycle))\n ab_test_other()\n\n\ndef c1a_dispatch_app():\n today = datetime.today().date()\n tomorrow = today + timedelta(days=1)\n c1a_apps = Application.select().where(Application.status << [\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value], 
Application.dpd1_entry >= today, Application.dpd1_entry <\n tomorrow, Application.type == ApplicationType.CASH_LOAN.value)\n all_aids = [a.id for a in c1a_apps]\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C1A.value)\n end = 0\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n is_del == 0)\n start = end\n end += int(len(all_aids) * p.app_percentage)\n aids = all_aids[start:end]\n bids = [b.id for b in bombers]\n if not bids or not aids:\n continue\n average_number = get_average_number(len(aids), len(bids))\n p_end = 0\n for i, bid in enumerate(bids):\n p_start = p_end\n p_end += average_number[i]\n b_aids = aids[p_start:p_end]\n with db.atomic():\n q = Application.update(latest_bomber=bid, status=\n ApplicationStatus.AB_TEST.value).where(Application.id <<\n b_aids).execute()\n params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,\n 'application_ids': b_aids, 'dest_bomber_id': bid}\n new_in_record(**params)\n try:\n dispatch_inserts = []\n for aid in b_aids:\n dispatch_inserts.append({'application': aid, 'bomber':\n bid, 'partner': p.id, 'status': DisAppStatus.NORMAL\n .value})\n if dispatch_inserts:\n q = DispatchApp.insert_many(dispatch_inserts).execute()\n except Exception as e:\n logging.error('c1a分件写入dispatch_app error:%s' % str(e))\n\n\n<mask token>\n\n\ndef get_cash_bomber(bids, cycle):\n cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==\n 0, Bomber.instalment != cycle)\n cash_bids = [b.id for b in cash_bombers]\n return cash_bids\n\n\n<mask token>\n\n\ndef out_and_in_record_instalment(**kwargs):\n if not kwargs.get('application_ids'):\n return\n out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(\n DispatchAppHistory.application << kwargs['application_ids'],\n DispatchAppHistory.out_at.is_null(True)).execute()\n cycle_period = {(1): '10', (2): '30', (3): '60', (4): '90'}\n period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')\n kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(kwargs['dest_bomber_id'])).alias(\n 'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.\n alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).\n alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << kwargs[\n 'application_ids'])\n application_list = list(subquery)\n for idx in range(0, len(application_list), 50):\n applications = application_list[idx:idx + 50]\n app_ids = [i.application_id for i in applications]\n overdue_bills = OverdueBill.select().where(OverdueBill.\n collection_id << app_ids)\n sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]\n bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)\n insert_args = lambad_instalment_result(bill_list, applications)\n if not insert_args:\n continue\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef c1b_dispatch_in_record(**kwargs):\n app_ids = kwargs.get('apps')\n partner_id = kwargs.get('partner_id', 'null')\n bill_dict = kwargs.get('bill_dict')\n period = kwargs.get('period')\n bomber_id = kwargs.get('bomber_id')\n if not all([app_ids, partner_id, bill_dict, period]):\n return False\n bill_dict = {str(k): 
v for k, v in bill_dict.items()}\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().\n alias('entry_at'), Application.overdue_days.alias(\n 'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.id << app_ids)\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef check_call_history(application):\n app_create_at = application.created_at + timedelta(days=4)\n if datetime.today().date() > app_create_at.date():\n call_actions = CallActions.select().where(CallActions.type == 0, \n CallActions.application == application.id, CallActions.\n created_at > datetime.now() - timedelta(days=5))\n for call in call_actions:\n if call.phone_status == PhoneStatus.CONNECTED.value:\n return True\n return False\n return True\n\n\n@action(MessageAction.BOMBER_SCAVENGER)\ndef scavenger(payload, msg_id):\n scavenger_time = -60\n scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'SCAVENGER_TIME').first()\n if scavenger and scavenger.value.isdigit():\n scavenger_time = -int(scavenger.value)\n update_auto_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value, description='scavenger').where(AutoCallList.status ==\n AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime\n .now() + timedelta(minutes=scavenger_time))\n count = update_auto_call_list.execute()\n logging.info('scavenger processed %s application', count)\n mail_box_scavenger_time = -30\n mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'MAIL_BOX_SCAVENGER_TIME').first()\n if mail_box_scavenger and mail_box_scavenger.value.isdigit():\n mail_box_scavenger_time = -int(mail_box_scavenger.value)\n update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.\n value, AutoCallList.updated_at < datetime.now() + timedelta(minutes\n =mail_box_scavenger_time))\n mail_box_count = update_mail_box_call_list.execute()\n logging.info('scavenger update mail box %s', mail_box_count)\n update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value\n ).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.\n updated_at < datetime.now() + timedelta(minutes=-30))\n ivr_result = update_auto_ivr.execute()\n logging.info('scavenger update %s ivr' % ivr_result)\n\n\n<mask token>\n\n\n@action(MessageAction.REPORT_BOMBER_COLLECTION)\ndef report_bomber_collection(payload, msg_id):\n start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)\n ).scalar()\n now = datetime.now()\n if start_date and str(start_date) == str(now)[:10]:\n return\n end_date = str(now + timedelta(days=1))[:10]\n start_date = str(now)[:10]\n dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))\n all_overdue_loan_sql1 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n GROUP BY 1;\n \"\"\"\n s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()\n d1 = OperatedDict(s_data1)\n all_overdue_loan_sql2 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n 
WHERE DATE(ba.follow_up_date) > CURDATE()\n AND ba.called_counts = 0\n GROUP BY 1;\n \"\"\"\n s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()\n d2 = OperatedDict(s_data2)\n overdue_loans_entered_into_predict_call_system_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE ba.called_counts >= 1\n GROUP BY 1;\n \"\"\"\n s_data3 = readonly_db.execute_sql(\n overdue_loans_entered_into_predict_call_system_sql).fetchall()\n d3 = OperatedDict(s_data3)\n loans_completed_sql = \"\"\"\n SELECT ba.cycle, COUNT(DISTINCT ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()\n d4 = OperatedDict(s_data4)\n connected_calls_automatic_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()\n d5 = OperatedDict(s_data5)\n connected_calls_manual_sql = \"\"\"\n SELECT bb.cycle, COUNT(bb.id)\n FROM bomber.bombing_history bb\n WHERE DATE(bb.created_at) = curdate()\n AND (bb.bomber_id < 150 OR bb.bomber_id > 200)\n GROUP BY bb.cycle;\n \"\"\"\n s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()\n d6 = OperatedDict(s_data6)\n logging.info('Directly get data from database successfully.')\n c1 = d1 - d2\n c2 = d3\n c3 = c2 / c1\n c4 = d4\n c5 = c4 / c2\n c6 = d5\n c7 = c6 / c4\n c8 = d6\n c9 = OperatedDict(get_agent())\n c10 = (c6 + c8) / c9\n try:\n c11 = average_call_duration_team(start_date, end_date)\n except AttributeError:\n c11 = {}\n lst = []\n for i in range(1, 5):\n lst.append({'apply_date': start_date, 'cycle': dct[i],\n 'all_overdue_loan': c1.get(i, 0),\n 'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),\n 'of_overdue_loans_entered_into_predict_call_system': round(c3.\n get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),\n 'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *\n 100, 1), 'connected_calls_automatic': c6.get(i, 0),\n 'connected_calls_automatic_completed_loans': round(c7.get(i, 0),\n 1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i, \n 0), 'average_calls_agent': round(c10.get(i, 0), 1),\n 'average_call_duration_team': round(c11.get(i, 0), 1)})\n ReportCollection.insert_many(lst).execute()\n logging.info('report_bomber_collection:Done!')\n\n\n<mask token>\n\n\ndef new_out_record(**kwargs):\n if not kwargs['application_ids']:\n return\n DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.\n bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.\n application << kwargs['application_ids'], DispatchAppHistory.out_at\n .is_null(True)).execute()\n if kwargs.get('month_dispatch'):\n return\n try:\n Application.update(ptp_bomber=None).where(Application.id << kwargs[\n 'application_ids']).execute()\n except Exception as e:\n logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[\n 'application_ids'], str(e)))\n\n\n<mask token>\n\n\ndef end_old_application(old_app, paid=False):\n if paid:\n if old_app.status == OldLoanStatus.WAITING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return\n if old_app.status == OldLoanStatus.PROCESSING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return old_app.application_id\n end_date = old_app.end_date\n now = datetime.now()\n if now >= max(end_date, old_app.promised_date or 
now):\n old_app.status = OldLoanStatus.FINISHED.value\n old_app.save()\n return old_app.application_id\n\n\n<mask token>\n\n\ndef get_before_bomber(date_time):\n begin_time = str(date_time - timedelta(days=7))\n end_time = str(date_time)\n old_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at<date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (begin_time, begin_time))\n old_data = run_one_sql(old_sql)\n new_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at> '%s' \n and created_at<'%s'\n and overdue_days=1;\n \"\"\"\n % (begin_time, end_time))\n new_data = run_one_sql(new_sql)\n dpd4_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>date_add('%s',interval 1 day) \n and created_at< date_add('%s',interval 1 day)\n and overdue_days=4;\n \"\"\"\n % (begin_time, end_time))\n dpd4_data = run_one_sql(dpd4_sql)\n dpd2_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at< date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (end_time, end_time))\n dpd2_data = run_one_sql(dpd2_sql)\n all_money = old_data + new_data\n repayment = all_money - dpd4_data - dpd2_data\n pro = 0\n if all_money:\n pro = repayment / all_money * 100\n RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,\n proportion=pro, repayment=repayment)\n\n\n<mask token>\n\n\ndef get_c1a_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=19)\n repayment_sql = (\n \"\"\"\n select \n sum(b.principal_part+b.late_fee_part) as paid_amount, \n cdt\n from \n (select \n br.principal_part, br.late_fee_part, \n date(cdt) as cdt, br.repay_at, br.application_id\n from (\n select ba.id, ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n if not repayment:\n return\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n for d in repayment:\n repay = d[0] / 1000000\n 
report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n d[1], RepaymentReportInto.cycle == Cycle.C1A.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = round(repay, 3)\n pro = repay / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_c2_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n not_contain_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_money = run_one_sql(not_contain_sql)\n begin_date = date_time - timedelta(days=37)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n not_contain_repay_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_repay = run_all_sql(not_contain_repay_sql)\n if not not_contain_money and repayment:\n return\n for i in ContainOut.values():\n if i == ContainOut.NOT_CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(\n not_contain_money, 3), proportion='0', repayment=0,\n is_first_loan=is_first_loan, 
contain_out=ContainOut.\n NOT_CONTAIN.value)\n for repay in not_contain_repay:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n elif i == ContainOut.CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(all_money, 3),\n proportion='0', repayment=0, is_first_loan=\n is_first_loan, contain_out=ContainOut.CONTAIN.value)\n for repay in repayment:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\ndef get_c3_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id, ba.c3_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=30)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c3_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c3_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = '%s'\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 4\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C3.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n if not repayment:\n return\n for repay in repayment:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n repay[1], RepaymentReportInto.cycle == Cycle.C3.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = repay_money\n pro = 0\n if report.all_money and int(report.all_money):\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n 
report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_static_bomber(begin_date):\n active_date = begin_date - timedelta(days=8)\n bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),\n BomberR.last_active_at.alias('active')).where(BomberR.\n last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])\n summary = []\n for bomber in bombers:\n summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':\n bomber.role.cycle, 'work_ind': 0})\n SummaryBomber.insert_many(summary).execute()\n\n\n<mask token>\n\n\n@time_logger\ndef get_call_and_made(end_date, begin_date, real_time_query=False):\n call_sql = (\n \"\"\"\n select \n bomber_id, \n count(case when relationship is not null then application_id end) \n as 'call_cnt', \n count(distinct case when relationship is not null then \n application_id end) as 'call_case',\n count(case when phone_status=4 then application_id end) as 'connect',\n count(distinct case when phone_status=4 then application_id end) \n as 'connect_case'\n from (\n select bomber_id,application_id,phone_status, cycle, relationship\n from bomber.call_actions ba\n where created_at>'%s' and created_at<'%s'\n and type in (0, 1)\n ) a\n group by 1\n \"\"\"\n % (begin_date, end_date))\n calls = run_all_sql(call_sql)\n if real_time_query:\n return calls\n for call in calls:\n bomber, call_cnt, case_made, connect_cnt, case_connect = call\n SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,\n call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(\n SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date\n ).execute()\n return calls\n\n\n@time_logger\ndef get_claimed_cnt(end_date, begin_date, real_time_query=False):\n table_date = begin_date - timedelta(days=30)\n claimed_sql = (\n \"\"\"\n SELECT\n COUNT( `t1`.`application_id` ) AS cnt,\n `t1`.`bomber_id` AS bomber_id \n FROM\n `dispatch_app_history` AS t1 \n WHERE\n ( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null ) \n AND ( `t1`.`bomber_id` != 1000 ) \n AND ( `t1`.`partner_id` IS null ) \n AND ( `t1`.`entry_at` > '%s' ) \n AND ( `t1`.`entry_at` < '%s' ) \n GROUP BY\n `t1`.`bomber_id`\n \"\"\"\n % (begin_date, table_date, end_date))\n claimeds = run_all_sql(claimed_sql)\n if real_time_query:\n return claimeds\n for claimed in claimeds:\n cnt, bomber_id = claimed\n SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == bomber_id).execute()\n return claimeds\n\n\ndef get_sms_data(end_data, begin_data):\n all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(\n 'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')\n ).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.\n created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())\n ).group_by(ConnectHistoryR.operator)\n for sms in all_sms:\n SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==\n begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()\n return all_sms\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_NEW)\ndef summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_active_bomber(begin_date)\n get_call_and_made(end_date, begin_date)\n get_claimed_cnt(end_date, begin_date)\n get_sms_data(end_date, 
begin_date)\n get_ptp_data(end_date, begin_date)\n get_recover_amount(end_date, begin_date)\n get_unfollowed(begin_date)\n get_unfollowed_call(begin_date)\n\n\ndef get_new_case_amount(begin_date, end_date):\n all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.\n entry_late_fee_pending + DispatchAppHistoryR.\n entry_principal_pending).alias('pending'), DispatchAppHistoryR.\n bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')\n ).where(DispatchAppHistoryR.entry_at > begin_date, \n DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.\n partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)\n for case in all_case:\n SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt\n =case.cnt).where(SummaryBomber.bomber_id == case.bomber_id, \n SummaryBomber.time == begin_date).execute()\n return all_case\n\n\n<mask token>\n\n\n@time_logger\ndef get_new_case_call(begin_date, end_date, real_query_time=False):\n sql = (\n \"\"\"\n SELECT\n bd.bomber_id,\n count( DISTINCT bd.application_id )\n FROM\n bomber.dispatch_app_history bd\n INNER JOIN bomber.call_actions bc \n ON bd.application_id = bc.application_id \n AND bd.bomber_id = bc.bomber_id \n AND date( bd.entry_at ) = date( bc.created_at ) \n WHERE\n entry_at > '%s' \n AND entry_at < '%s' \n AND partner_id IS NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n new_case_calls = run_all_sql(sql)\n if real_query_time:\n return new_case_calls\n for call in new_case_calls:\n SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber\n .bomber_id == call[0], SummaryBomber.time == begin_date).execute()\n return new_case_calls\n\n\n@time_logger\ndef get_calltime_avg(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction,\n count( 1 ) AS auto_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' ' \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n AND an.recording is not null\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]][0] += data[1]\n result[data[0]][1] += data[2]\n continue\n result[data[0]] = [data[1], data[2]]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=\n value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == key).execute()\n return result\n\n\ndef get_no_calltime_avg(begin_date, end_date):\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND 
an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND (an.status!='ANSWERED' or an.recording is null) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n for data in manuals:\n SummaryBomber.update(calltime_no_case_sum=data[1],\n calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /\n data[2] if data[2] else 0).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == data[0]).execute()\n\n\n@time_logger\ndef get_calltime_sum(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' '\n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == key).execute()\n return result\n\n\n<mask token>\n\n\n@action(MessageAction.UPDATE_SUMMARY_NEW)\ndef update_summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'UPDATE_SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_new_case_amount(begin_date, end_date)\n get_kp_cleared(begin_date, end_date)\n get_kp_today(begin_date, end_date)\n get_ptp_cnt(begin_date, end_date)\n get_ptp_call_cnt(begin_date, end_date)\n get_new_case_cleared(begin_date, end_date)\n get_new_case_call(begin_date, end_date)\n get_calltime_avg(begin_date, end_date)\n get_no_calltime_avg(begin_date, end_date)\n get_calltime_sum(begin_date, end_date)\n\n\ndef get_cycle_claimed(begin_date, end_date):\n sql = (\n \"\"\"\n select cycle,count(1)\n from bomber.application where cycle in (1,2,3,4)\n and (finished_at is null or (finished_at>'%s'))\n and created_at>'2018-09-01'\n group by 1\n \"\"\"\n % begin_date)\n result = run_all_sql(sql)\n return result\n\n\n@time_logger\ndef cycle_new_case(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( ba.id ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.created_at ) = bo.which_day_overdue \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = 
bo.application_id \n AND date( ba.c1b_entry ) = bo.which_day_overdue\n WHERE\n c1b_entry > '%s' \n AND c1b_entry < '%s' \n UNION\n SELECT\n 3 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c2_entry ) = bo.which_day_overdue \n WHERE\n c2_entry > '%s' \n AND c2_entry < '%s' \n UNION\n SELECT\n 4 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c3_entry ) = bo.which_day_overdue\n WHERE\n c3_entry > '%s' \n AND c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n all_datas = run_all_sql(sql)\n if real_time_query:\n return all_datas\n for data in all_datas:\n SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()\n return all_datas\n\n\n@time_logger\ndef get_cycle_new_case_call(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.created_at ) = date( bc.created_at ) \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c1b_entry ) = date( bc.created_at ) \n WHERE\n ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s'\n UNION\n SELECT\n 3 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c2_entry ) = date( bc.created_at ) \n WHERE\n ba.c2_entry > '%s' \n AND ba.c2_entry < '%s'\n UNION\n SELECT\n 4 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c3_entry ) = date( bc.created_at ) \n WHERE\n ba.c3_entry > '%s' \n AND ba.c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n cycle_datas = run_all_sql(sql)\n if real_time_query:\n return cycle_datas\n for data in cycle_datas:\n SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber\n .time == begin_date, SummaryBomber.cycle == data[0], \n SummaryBomber.bomber_id == data[0]).execute()\n return cycle_datas\n\n\ndef get_cycle_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n '1' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.created_at ) = date( br.repay_at ) \n WHERE ba.created_at > '%s' \n AND ba.created_at < '%s' \n GROUP BY 1, 2 ) a \n UNION\n SELECT \n '2' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) 
\n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n GROUP BY 1, 2) a\n \"\"\"\n % (begin_date, end_date, begin_date, end_date))\n cycle_cleared = run_all_sql(sql)\n for i in cycle_cleared:\n SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber\n .cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.\n time == begin_date).execute()\n\n\n<mask token>\n\n\ndef get_change_bomber():\n cycle_role_map = {(5): Cycle.C1B.value, (6): Cycle.C2.value, (8): Cycle\n .C3.value}\n result = {}\n bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,\n BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=\n BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.\n created_at) == date.today(), BomberLog.role_id << list(\n cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.\n instalment == 0).dicts()\n for b_log in bomber_logs:\n cycle = cycle_role_map.get(b_log['role_id'])\n group_id = b_log['group_id']\n if cycle in result:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}\n else:\n result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}}\n if b_log['operation'] == 0:\n result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])\n if result:\n bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map\n .keys())), Bomber.is_del == 0, Bomber.instalment == 0)\n for b in bombers:\n cycle_result = result.get(cycle_role_map[b.role_id], {})\n role_result = cycle_result.get(b.group_id)\n if not role_result:\n continue\n role_result['new_ids'].append(b.id)\n resutl_list = []\n for cycle, group_dict in result.items():\n resutl_list.extend(list(group_dict.values()))\n return resutl_list\n return []\n\n\n<mask token>\n\n\ndef get_surplus_application(new_ids, del_ids, average_nums, classified_apps):\n surplus_apps = []\n for del_id in del_ids:\n del_res = classified_apps.get(del_id, {})\n p_list = del_res.get('p_list', [])\n np_list = del_res.get('np_list', [])\n del_res['need_num'] = -(len(p_list) + len(np_list))\n del_res['to_list'] = np_list + p_list\n surplus_apps.extend(p_list)\n surplus_apps.extend(np_list)\n for index, bid in enumerate(new_ids):\n average = average_nums[index]\n bomber_app = classified_apps.get(bid)\n if not bomber_app:\n bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid\n ).first()\n bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':\n [], 'np_num': 0, 'need_num': average, 'partner_id': bomber.\n partner_id if bomber else ''}\n classified_apps[bid] = bomber_app\n else:\n p_num = len(bomber_app['p_list'])\n np_num = len(bomber_app['np_list'])\n if p_num > average:\n bomber_app['need_num'] = -np_num\n else:\n bomber_app['need_num'] = average - (p_num + np_num)\n bomber_app['p_num'] = p_num\n bomber_app['np_num'] = np_num\n if bomber_app['need_num'] < 0:\n random.shuffle(bomber_app['np_list'])\n res_over = bomber_app['np_list'][:-bomber_app['need_num']]\n bomber_app['to_list'] = res_over\n surplus_apps.extend(res_over)\n classified_apps_list = sorted(classified_apps.values(), key=lambda x: x\n ['need_num'], reverse=True)\n return surplus_apps, classified_apps_list\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)\ndef bomber_dispatch_applications(payload, msg_id):\n change_bombers = get_change_bomber()\n instalment_change_bombers = get_instalment_change_bomber()\n params = {ApplicationType.CASH_LOAN.value: change_bombers,\n ApplicationType.CASH_LOAN_STAGING.value: 
instalment_change_bombers}\n for type, bombers in params.items():\n change_bomber_dispatch_apps(change_bombers=bombers, type=type)\n\n\n<mask token>\n\n\ndef get_summary_daily_time():\n mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')\n mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')\n now_date = datetime.now()\n now_date_time = now_date.time()\n today_str = str(now_date.date())\n if now_date_time < mid_time_t1.time():\n yes_date = now_date - timedelta(days=1)\n yes_date_str = str(yes_date.date())\n begin_str = yes_date_str + ' 17:20:00'\n end_str = today_str + ' 00:00:00'\n elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():\n begin_str = today_str + ' 00:00:00'\n end_str = today_str + ' 12:40:00'\n else:\n begin_str = today_str + ' 12:40:00'\n end_str = today_str + ' 17:20:00'\n begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')\n end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')\n summary_datetime = now_date - timedelta(minutes=30)\n summary_date = summary_datetime.date()\n return begin_time, end_time, summary_date\n\n\n@action(MessageAction.SUMMARY_DAILY)\ndef summary_daily_data(payload, msg_id):\n begin_time, end_time, summary_date = get_summary_daily_time()\n call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.\n bomber_id, CallActionsR.application_id, CallActionsR.promised_date,\n CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(\n CallActionsR.created_at >= begin_time, CallActionsR.created_at <\n end_time, CallActionsR.type << (0, 1))\n summary_dailys = {}\n for call in call_actions:\n if call.bomber_id not in summary_dailys:\n summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.\n bomber_id, 'summary_date': str(summary_date)}\n if call.name and call.number:\n summary_dailys[call.bomber_id]['call_cnt'] += 1\n if call.promised_date:\n summary_dailys[call.bomber_id]['ptp_cnt'] += 1\n C1_sql = (\n \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,a.cycle\n from \n (select a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,2,4,5)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\"\n % (begin_time, end_time))\n C1_repayment = run_all_sql(C1_sql)\n other_sql = (\n \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,cycle\n from (\n select application_id,current_bomber_id,pay_amount,repay_at,cycle\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at,\n br.cycle\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (3,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n group by 1,4) b\n group by 1\n \"\"\"\n % (begin_time, end_time))\n other_repayment = run_all_sql(other_sql)\n all_repayment = C1_repayment + other_repayment\n for res in all_repayment:\n bomber_id, pay_amount, 
cycle = res\n if bomber_id in summary_dailys:\n summary_dailys[bomber_id]['repayment'] += pay_amount\n else:\n summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': cycle, 'repayment': pay_amount, 'bomber_id':\n bomber_id, 'summary_date': str(summary_date)}\n insert_values = list(summary_dailys.values())\n if insert_values:\n SummaryDaily.insert_many(insert_values).execute()\n\n\n<mask token>\n\n\ndef month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):\n apps = list(apps)\n np_apps = list(np_apps)\n random.shuffle(np_apps)\n apps_len = len(apps)\n np_apps_len = len(np_apps)\n end = 0\n all_app_precentage = 0\n partners = Partner.select().where(Partner.cycle == cycle, Partner.\n status == PartnerStatus.NORMAL.value)\n for p in partners:\n all_app_precentage += p.app_percentage\n for partner in partners:\n bombers = Bomber.select().where(Bomber.partner == partner.id, \n Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.\n value)\n bids = {b.id: b for b in bombers}\n if len(bids) == 0:\n logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))\n continue\n start = end\n if np_apps_len >= int(apps_len * all_app_precentage):\n end = start + int(apps_len * partner.app_percentage)\n else:\n end = start + int(np_apps_len * partner.app_percentage /\n all_app_precentage)\n partner_app = np_apps[start:end]\n dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)\n np_apps = np_apps[end:]\n return np_apps\n\n\ndef month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):\n sys_cycle = {(1): 'AB_TEST_C1A', (2): 'AB_TEST_C1B', (3): 'AB_TEST_C2',\n (4): 'AB_TEST_C3'}\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0\n )\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n bombers = bombers.where(Bomber.instalment == 0)\n bids = {b.id: b for b in bombers}\n if cycle == Cycle.C1A.value:\n np_ids = [a['id'] for a in np_apps]\n np = Application.update(status=ApplicationStatus.PROCESSING.value,\n ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids\n ).execute()\n bomber_app_logs = app_logs.get(cycle, {})\n out_param = {'application_ids': bomber_app_logs.get('to_ids', []),\n 'month_dispatch': 1, 'src_bomber_id': cycle}\n new_out_record(**out_param)\n in_param = {'cycle': cycle, 'application_ids': np_ids,\n 'dest_bomber_id': cycle}\n new_in_record(**in_param)\n bomber_app_logs['need_num'] = len(np_apps)\n bomber_app_logs['form_ids'] = np_ids\n bomber_app_logs['status'] = 1\n else:\n dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)\n dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)\n\n\ndef dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,\n type=ApplicationType.CASH_LOAN.value):\n apps = list(apps)\n random.shuffle(apps)\n bids_list = list(bids.keys())\n if len(bids_list) <= 0:\n logging.info('get_dispatch_app_to_bomber no bids')\n return\n average_num = get_average_number(len(apps), len(bids_list))\n bomber_end = 0\n with db.atomic():\n for index, bid in enumerate(bids_list):\n current_bomber = bids.get(bid)\n bomber_app_logs = app_logs.get(bid, {})\n bomber_start = bomber_end\n bomber_end = bomber_start + average_num[index]\n bomber_apps = apps[bomber_start:bomber_end]\n from_p, from_np, from_ids, status = [], [], [], 0\n for ba in bomber_apps:\n promised_date = ba.get('promised_date')\n from_ids.append(ba['id'])\n if promised_date and 
promised_date.date() >= date.today():\n from_p.append(ba['id'])\n else:\n from_np.append(ba['id'])\n app_status = ApplicationStatus.AB_TEST.value\n if (cycle == Cycle.C1A.value and not out_partner and type ==\n ApplicationType.CASH_LOAN.value):\n app_status = ApplicationStatus.PROCESSING.value\n if from_p:\n p = Application.update(ptp_bomber=bid, latest_bomber=bid,\n status=app_status).where(Application.id << from_p).execute(\n )\n p_ids = bomber_app_logs.get('p_ids', []) + from_p\n bomber_app_logs['p_ids'] = p_ids\n if from_np:\n np = Application.update(latest_bomber=bid, ptp_bomber=None,\n status=ApplicationStatus.AB_TEST.value).where(\n Application.id << from_np).execute()\n np_ids = bomber_app_logs.get('np_ids', []) + from_np\n bomber_app_logs['np_ids'] = np_ids\n in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.\n partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}\n if type == ApplicationType.CASH_LOAN.value:\n out_param = {'src_bomber_id': bid, 'application_ids':\n bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}\n new_out_record(**out_param)\n new_in_record(**in_param)\n else:\n out_and_in_record_instalment(**in_param)\n bomber_app_logs['status'] = 1\n need_num = bomber_app_logs.get('need_num', 0) + average_num[index]\n bomber_app_logs['need_num'] = need_num\n all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids\n bomber_app_logs['form_ids'] = all_form_ids\n if not out_partner:\n continue\n try:\n DispatchApp.delete().where(DispatchApp.application.in_(\n from_ids)).execute()\n dispatch_ins = [{'application': id, 'partner':\n current_bomber.partner_id, 'bomber': bid, 'status':\n DisAppStatus.NORMAL.value} for id in from_ids]\n DispatchApp.insert_many(dispatch_ins).execute()\n except Exception as e:\n logging.info(\n 'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str\n (e), bid, from_ids))\n\n\n<mask token>\n\n\ndef calc_entry_time(overdue_days):\n app_entry_time = {}\n overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],\n 'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}\n for key, value in overdue_entry.items():\n if value[0] <= overdue_days <= value[1]:\n app_entry_time[key] = datetime.now()\n else:\n app_entry_time[key] = None\n return app_entry_time\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_BOMBER_OVERDUE)\ndef summary_bomber_overdue_everyday(payload, msg_id):\n cycle_list = Cycle.values()\n which_day = date.today()\n for cycle in cycle_list:\n apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,\n ApplicationR.ptp_bomber, ApplicationR.overdue_days,\n ApplicationR.promised_date, ApplicationR.follow_up_date,\n ApplicationR.external_id, OverdueBillR.status, OverdueBillR.\n periods, OverdueBillR.sub_bill_id).join(OverdueBillR,\n JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id\n ).where(ApplicationR.status != ApplicationStatus.REPAID.value, \n ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()\n bomber_overdue_list = []\n for app in apps:\n status = app.get('status')\n if status == ApplicationStatus.REPAID.value:\n continue\n ptp_bomber = app.get('ptp_bomber')\n promised_date = app.get('promised_date')\n follow_up_date = app.get('follow_up_date')\n if not promised_date or promised_date.date() < date.today():\n ptp_bomber = promised_date = None\n if not follow_up_date or follow_up_date.date() < date.today():\n follow_up_date = None\n overdue_dict = {'collection_id': app.get('id'), 'external_id':\n app.get('external_id'), 'sub_bill_id': app.get(\n 
'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app\n .get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':\n ptp_bomber, 'promised_date': promised_date,\n 'follow_up_date': follow_up_date, 'which_day': which_day,\n 'overdue_days': app.get('overdue_days')}\n bomber_overdue_list.append(overdue_dict)\n try:\n if bomber_overdue_list:\n with db.atomic():\n for index in range(0, len(bomber_overdue_list), 1000):\n insert_list = bomber_overdue_list[index:index + 1000]\n BomberOverdue.insert_many(insert_list).execute()\n except Exception as e:\n logging.info(\n 'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %\n (cycle, str(which_day), str(e)))\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)\ndef today_ptp_auto_call_switch(payload, msg_id):\n today = datetime.today().date()\n next_day = today + timedelta(days=1)\n apps = ApplicationR.select(ApplicationR.latest_bomber).where(\n ApplicationR.promised_date < next_day, ApplicationR.promised_date >=\n today, ApplicationR.promised_date.is_null(False), ApplicationR.\n status != ApplicationStatus.REPAID.value, ApplicationR.cycle <\n Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(\n ApplicationR.latest_bomber)\n bids = [a.latest_bomber_id for a in apps]\n if not bids:\n return\n q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(\n BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids\n ).execute()\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)\ndef update_today_switch_every_day(payload, msg_id):\n q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(\n BomberPtp.auto_ext.is_null(False)).execute()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@action(MessageAction.BOMBER_HEALTH_CHECK)\ndef health_check(payload, msg_id):\n pass\n\n\n<mask token>\n\n\ndef dpd1_process(lst):\n \"\"\"已废弃的方法\"\"\"\n if not lst:\n return\n for key, l in lst.items():\n rule = getattr(BeforeInBomber, key).value\n query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)\n ).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(\n rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.\n call_success()))\n success_set = {i.loanid for i in query}\n failed_list = list(set(l) - success_set)\n post_params = {'$and': rule.get('$and'), 'app_list': failed_list}\n resp = Hyperloop().post('/bomber/score/verify', json=post_params)\n if not resp.ok:\n logging.error('hyperloop score verification failed: %s, %s',\n str(resp.status_code), str(resp.text))\n logging.error('hyperloop score verification failed: %s', str(\n post_params))\n continue\n logging.debug('hyperloop score verification success: %s', resp.content)\n resp_json = resp.json()\n app_list = resp_json['data']\n if not app_list:\n continue\n for item in app_list:\n if random.randint(0, 5) == 1:\n send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':\n int(item)})\n\n\n@action(MessageAction.GET_IVR)\ndef get_ivr(payload, msg_id):\n logging.warning('start get_ivr')\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'DPD1-3_INTO_IVR').first()\n now = date.today()\n if sys_config and sys_config.value:\n start = now - timedelta(days=3)\n else:\n start = now\n end = now + timedelta(days=4)\n item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()\n if not item:\n AutoIVR.delete().execute()\n current_page = 0\n elif item.current_page >= item.total_page:\n return\n else:\n current_page = item.current_page\n auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,\n 'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,\n 'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':\n 10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,\n 'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,\n 'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,\n 'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,\n 'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,\n 'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,\n 'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,\n 'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,\n 'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,\n 'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,\n 'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':\n 101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,\n 'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,\n 'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,\n 'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}\n current_page += 1\n with db.atomic() as transaction:\n while True:\n bill_service = BillService()\n ivr_action = bill_service.ivr_pages(page=current_page,\n page_size=500, start_time=utc_datetime(str(start)),\n end_time=utc_datetime(str(end)))\n result = ivr_action['result']\n page_size = int(ivr_action.get('page_size', 0))\n total_page = int(ivr_action.get('total_page', 0))\n insert_args = []\n for a in result:\n due_at = no_utc_datetime(a['due_at'])\n days = (due_at.date() - now).days\n if days == 2:\n continue\n if days > 0:\n time = str(days)\n else:\n time = str(days).replace('-', 
'PDP')\n key = a['app_name'] + str(a['su']) + time\n group = auto_ivr.get(key)\n user_id = a['user_id']\n try:\n user_resp = AccountService().get_user(path_params={\n 'user_id': user_id})\n if str(user_resp['mobile_no']) == str(a['user_mobile_no']):\n numbers = a['user_mobile_no']\n else:\n numbers = a['user_mobile_no'] + ',' + user_resp.get(\n 'mobile_no')\n except:\n logging.error('Request Account Service Error.')\n numbers = a['user_mobile_no']\n insert_args.append({'application_id': a['id'], 'numbers':\n numbers, 'group': group, 'user_id': user_id})\n AutoIVR.insert_many(insert_args).execute()\n if current_page == 1:\n IVRActionLog.create(total_page=total_page, proc_date=now,\n page_size=page_size, current_page=current_page)\n item = IVRActionLog.get(IVRActionLog.proc_date == now)\n else:\n item.current_page = current_page\n item.page_size = page_size\n item.total_page = total_page\n item.save()\n transaction.commit()\n current_page += 1\n if current_page > int(total_page):\n break\n if sys_config and sys_config.value:\n try:\n classfiy_dpd_ptp_apps()\n except Exception as e:\n logging.error('dpd1-3_test_error:%s' % str(e))\n\n\ndef ivr_t2_test():\n t2_groups = [39, 40, 41, 42, 43, 44]\n ivr_test_proportion = 0.2\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'IVR_TEST_PROPORTION').first()\n if sys_config and sys_config.value:\n ivr_test_proportion = float(sys_config.value)\n t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.\n status == AutoIVRStatus.AVAILABLE.value)\n t2_dict = defaultdict(list)\n for ivr in t2_ivrs:\n t2_dict[ivr.group].append(ivr.id)\n test_ivr_ids = []\n for group, ivr_ids in t2_dict.items():\n number = ceil(len(ivr_ids) * ivr_test_proportion)\n test_ivr_ids += ivr_ids[:number]\n if not test_ivr_ids:\n return\n q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.\n group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.APP_MERGE)\n@deprecated(version='1.0', reason='This function will be removed soon')\ndef app_merge(payload, msg_id):\n sql = \"\"\"\n select *\n from (\n select a.id as id\n from dashboard.application as a\n inner join repayment.bill2 as b on b.external_id = a.id\n where not exists (\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.app = 'DanaCepat'\n and a.is_first_loan = 1\n and a.apply_at < '2018-08-23 20:50:00'\n and b.overdue_days between 1 and 3\n and b.status != 2) result\n where not exists (\n select 1\n from bomber.application as a\n where a.cycle = 1\n and a.status = 4\n and a.id = result.id\n )\n \"\"\"\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n new_data = cursor.fetchall()\n cursor.close()\n if new_data:\n bomber = [103, 104]\n for d in new_data:\n app_id = {'id': d[0]}\n application_overdue(app_id, None)\n Application.update(status=ApplicationStatus.AB_TEST.value,\n latest_bomber=random.choice(bomber), ptp_bomber=None).where(\n Application.id == d[0]).execute()\n logging.warning('add new app success')\n ptp = date.today() - timedelta(days=1)\n del_sql = (\n \"\"\"\n select a.id\n from bomber.application as a\n where exists(\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.cycle = 1\n and a.status = 4\n and (a.promised_date is null or a.promised_date < \"%s\")\n \"\"\"\n % ptp)\n cursor = readonly_db.get_cursor()\n cursor.execute(del_sql)\n del_date = 
cursor.fetchall()\n cursor.close()\n if del_date:\n return\n ids = list()\n for d in del_date:\n ids.append(d[0])\n Application.update(status=ApplicationStatus.UNCLAIMED.value,\n latest_bomber=None).where(Application.id << ids).execute()\n\n\n@action(MessageAction.APPLICATION_BOMBER)\ndef application_overdue(payload, msg_id):\n application_id = payload['id']\n sub_bill_id = payload['bill_sub_id']\n local_app = Application.select().where(Application.external_id ==\n application_id).order_by(Application.finished_at).first()\n if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:\n logging.info('application %s overdue, already exists', application_id)\n add_contact(local_app)\n return\n if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:\n overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==\n sub_bill_id, OverdueBillR.external_id == application_id)\n if overdue_bill.exists():\n logging.info(\n 'application %s,sub_bill_id %s overdue, already exists' % (\n application_id, sub_bill_id))\n return\n try:\n sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])\n sub_bill = sub_bill[0]\n except Exception:\n logging.error(\n 'application %s overdue, get sub_bill info failed:Request To repayment Error'\n , application_id)\n return\n if sub_bill['status'] == 2:\n logging.error('application %s overdue, but bills already cleared',\n application_id)\n return\n overdue_days = sub_bill.get('overdue_days', 0)\n if overdue_days == 0:\n logging.info('application {} no overdue'.format(str(application_id)))\n return\n gold_eye = GoldenEye().get('/applications/%s' % application_id)\n if not gold_eye.ok:\n logging.error('get application %s failed: Request to GoldenEye.',\n application_id)\n return\n gold_app = gold_eye.json().get('data')\n user_id = gold_app['user_id']\n apply_history = Dashboard().get('/users/%s/apply-history' % user_id)\n if not apply_history.ok:\n logging.error(\n 'get user %s apply history failed: Request to Dashboard Failed.',\n user_id)\n return\n history = apply_history.json().get('data')\n loan_success_times = len([(1) for i in history if i['status'] in [80, \n 90, 100, 70] and i['id'] != gold_app['id']])\n id = application_id\n type = ApplicationType.CASH_LOAN.value\n bill_id = sub_bill.get('bill_id')\n amount = sub_bill.get('amount')\n amount_net = sub_bill.get('amount_net')\n interest_rate = sub_bill.get('interest_rate')\n overdue_days = sub_bill.get('overdue_days')\n origin_due_at = sub_bill.get('origin_due_at')\n sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,\n 'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),\n 'overdue_days': overdue_days, 'origin_due_at': origin_due_at,\n 'amount': amount, 'amount_net': amount_net, 'interest_rate':\n interest_rate, 'external_id': application_id}\n if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:\n if local_app and local_app.status != ApplicationStatus.REPAID.value:\n sub_overdue_bill['collection_id'] = local_app.id\n local_app.amount += amount\n local_app.amount_net += amount_net\n local_app.save()\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('application %s,sub_bill_id:%s overdue created' %\n (application_id, sub_bill_id))\n return\n else:\n id = idg()\n type = ApplicationType.CASH_LOAN_STAGING.value\n sub_overdue_bill['collection_id'] = id\n ptp_info = BombingHistory.filter(BombingHistory.application == id).first()\n promised_amount = ptp_info and ptp_info.promised_amount\n promised_date = ptp_info 
and ptp_info.promised_date\n application = Application.create(id=id, user_id=gold_app['user_id'],\n user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[\n 'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],\n contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(\n 'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=\n birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(\n gold_app.get('id_ektp')), profile_province=(gold_app.get(\n 'profile_province') or {}).get('name'), profile_city=(gold_app.get(\n 'profile_city') or {}).get('name'), profile_district=(gold_app.get(\n 'profile_district') or {}).get('name'), profile_residence_time=\n gold_app.get('profile_residence_time'), profile_residence_type=\n gold_app.get('profile_residence_type'), profile_address=gold_app.\n get('profile_address'), profile_education=gold_app.get(\n 'profile_education'), profile_college=(gold_app.get(\n 'profile_college') or {}).get('name'), job_name=gold_app.get(\n 'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get\n ('job_bpjs'), job_user_email=gold_app.get('job_user_email'),\n job_type=gold_app.get('job_type'), job_industry=gold_app.get(\n 'job_industry'), job_department=gold_app.get('job_department'),\n job_province=(gold_app.get('job_province') or {}).get('name'),\n job_city=(gold_app.get('job_city') or {}).get('name'), job_district\n =(gold_app.get('job_district') or {}).get('name'), job_address=\n gold_app.get('job_address'), amount=amount, amount_net=amount_net,\n interest_rate=interest_rate, term=gold_app.get('term'),\n origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=\n sub_bill.get('repay_at'), loan_success_times=loan_success_times,\n arrived_at=datetime.now(), follow_up_date=datetime.now(),\n promised_amount=promised_amount, promised_date=promised_date,\n external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=\n datetime.now())\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('overdue application %s created', application_id)\n Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,\n status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)\n add_contact(application)\n\n\ndef add_contact(application):\n logging.info('start add contact for application: %s', application.id)\n contacts = Contact.filter(Contact.user_id == application.user_id)\n existing_numbers = {contact.number for contact in contacts}\n insert_contacts = list()\n mon_insert_contact = {}\n user_mobile_no = number_strip(application.user_mobile_no)\n if user_mobile_no and user_mobile_no not in existing_numbers:\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': user_mobile_no, 'relationship':\n Relationship.APPLICANT.value, 'source': 'apply info',\n 'real_relationship': Relationship.APPLICANT.value})\n existing_numbers.add(number_strip(application.user_mobile_no))\n extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id\n )\n if not extra_phone.ok:\n extra_phone = []\n logging.error('get user %s extra contacts failed', application.user_id)\n else:\n extra_phone = extra_phone.json()['data']\n if extra_phone:\n for i in extra_phone:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': number, 'relationship':\n Relationship.APPLICANT.value, 'source': 'extra phone',\n 
'real_relationship': Relationship.APPLICANT.value})\n key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n ec_contact = []\n contact = json.loads(application.contact or '[]')\n for i in contact:\n if number_strip(i['mobile_no']\n ) not in existing_numbers and number_strip(i['mobile_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['mobile_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['mobile_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['mobile_no']))\n if i['type'] != 1:\n continue\n if number_strip(i['tel_no']) not in existing_numbers and number_strip(i\n ['tel_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['tel_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['tel_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['tel_no']))\n if ec_contact:\n Contact.insert_many(ec_contact).execute()\n if all((application.job_tel, number_strip(application.job_tel), \n number_strip(application.job_tel) not in existing_numbers)):\n insert_contacts.append({'user_id': application.user_id, 'name':\n None, 'number': number_strip(application.job_tel),\n 'relationship': Relationship.COMPANY.value, 'source':\n 'basic info job_tel', 'real_relationship': Relationship.COMPANY\n .value})\n key = user_mobile_no, number_strip(application.job_tel\n ), ContactType.C_BASIC_INFO_JOB_TEL.value\n mon_insert_contact[key] = 1, 0, None\n existing_numbers.add(number_strip(application.job_tel))\n sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %\n application.external_id)\n if not sms_contacts.ok:\n sms_contacts = []\n logging.info('get user %s sms contacts failed', application.external_id\n )\n else:\n sms_contacts = sms_contacts.json()['data']\n if sms_contacts:\n for i in sms_contacts:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.SUGGESTED.value, 'source': 'sms contacts',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value\n mon_insert_contact[key] = 1, 0, i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n cf = GoldenEye().get('/applications/%s/call/frequency' % application.\n external_id)\n if not cf.ok:\n call_frequency = []\n logging.error('get application %s call frequency error',\n application.external_id)\n else:\n call_frequency = cf.json()['data']\n insert_contacts = []\n fm = GoldenEye().get('/applications/%s/contact/family-member' %\n application.external_id)\n if not fm.ok:\n family = []\n logging.error('get application %s family-member info error',\n application.external_id)\n else:\n family = fm.json()['data']\n if family:\n for i in family:\n if not i.get('number'):\n 
logging.info('family member %s' % str(i))\n continue\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n logging.info('family members: %s' % str(i))\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.FAMILY.value, 'source': FamilyContactType.\n CALLEC.value, 'total_count': i.get('total_count', 1),\n 'total_duration': i.get('total_duration', 0),\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number, ContactType.F_CALL_EC.value\n mon_insert_contact[key] = i.get('total_count', 1), i.get(\n 'total_duration', 0), i['name'][:128]\n existing_numbers.add(number)\n mon_update_contact = {}\n if call_frequency:\n with db.atomic():\n count = 1\n for i in call_frequency:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n Contact.update(total_count=i['total_count'],\n total_duration=i['total_duration']).where(Contact.\n number == number, Contact.user_id == application.\n user_id)\n key = user_mobile_no, number\n mon_update_contact[key] = i['total_count'], i[\n 'total_duration']\n continue\n if count < 6:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.FAMILY.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': FamilyContactType.\n CALLTOP5.value, 'real_relationship': Relationship.\n FAMILY.value})\n count += 1\n key = user_mobile_no, number, ContactType.F_CALL_TOP5.value\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n else:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.SUGGESTED.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': 'call frequency',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = (user_mobile_no, number, ContactType.\n S_CALL_FREQUENCY.value)\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n next_apply_list = AccountService().add_contact(application.user_id)\n for next_apply in next_apply_list:\n number = number_strip(str(next_apply))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n SUGGESTED.value, source='online profile phone',\n real_relationship=Relationship.SUGGESTED.value)\n key = (user_mobile_no, number, ContactType.\n S_ONLINE_PROFILE_PHONE.value)\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %\n application.user_id)\n if not next_applicant.ok:\n next_applicant = []\n logging.error('get user %s dual_contact contacts failed' %\n application.user_id)\n else:\n next_applicant = next_applicant.json()['data']\n if next_applicant:\n for i in next_applicant:\n number = number_strip(str(i))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=\n application.user_name, number=number, relationship=\n Relationship.APPLICANT.value, source='apply info',\n real_relationship=Relationship.APPLICANT.value)\n key = user_mobile_no, number, 
ContactType.A_APPLY_INFO.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n numbers = []\n try:\n numbers = AccountService().ktp_number(path_params={'user_id':\n application.user_id})\n except Exception as e:\n logging.info('request ktp numbers failed %s' % str(e))\n for n in numbers:\n number = number_strip(str(n))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n APPLICANT.value, source='ktp number', real_relationship=\n Relationship.APPLICANT.value)\n key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n try:\n ecs = GoldenEye().get('/applications/%s/contact/ec' % application.\n external_id)\n except Exception as e:\n logging.info('request ec-member error: %s' % str(e))\n try:\n if not ecs.ok:\n ec = []\n logging.info('get application %s ec-member info error',\n application.external_id)\n else:\n ec = ecs.json()['data']\n if ec:\n for e in ec:\n number = number_strip(e['numbers'])[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=e[\n 'name'][:128], number=number, relationship=\n Relationship.FAMILY.value, source=FamilyContactType\n .CONTACTEC.value, real_relationship=Relationship.\n FAMILY.value)\n key = (user_mobile_no, number, ContactType.F_CONTACT_EC\n .value)\n mon_insert_contact[key] = 1, 0, e['name'][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add ec_member error:%s' % str(e))\n try:\n mn = GoldenEye().get('/applications/%s/contact/my_number' %\n application.external_id)\n except Exception as e:\n logging.info('request my_number error: %s' % str(e))\n try:\n if not mn.ok:\n my = []\n logging.info('get application %s my_number info error',\n application.external_id)\n else:\n my = mn.json()['data']\n if my:\n for m in my:\n number = number_strip(m)[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=my[m][\n :128], number=number, relationship=Relationship.\n SUGGESTED.value, source='my number',\n real_relationship=Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_MY_NUMBER.value\n mon_insert_contact[key] = 1, 0, my[m][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add my_member error:%s' % str(e))\n try:\n cn = GoldenEye().get('/applications/%s/contact/company-number' %\n application.external_id)\n except Exception as e:\n logging.info('request company-number error: %s' % str(e))\n try:\n if not cn.ok:\n cn = []\n logging.info('get application %s company_number info error',\n application.external_id)\n else:\n cn = cn.json()['data']\n if cn:\n for c in cn:\n number = c\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=cn[c][\n :128], number=number, relationship=Relationship.\n COMPANY.value, source='company', real_relationship=\n Relationship.COMPANY.value)\n key = user_mobile_no, number, ContactType.C_COMPANY.value\n mon_insert_contact[key] = 1, 0, cn[c][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add company_member error:%s' % 
str(e))\n try:\n ol = AccountService().other_login_contact(userId=application.user_id)\n except Exception as e:\n logging.error('request other_login error: %s' % e)\n ol = {}\n try:\n for o in ol:\n number = number_strip(o)\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=ol[o][:128\n ], number=number, relationship=Relationship.SUGGESTED.\n value, source='other_login', real_relationship=\n Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value\n mon_insert_contact[key] = 1, 0, ol[o][:128]\n except Exception as e:\n logging.error('add other_login number error:%s' % e)\n logging.info('add contact for application %s finished', application.id)\n if mon_insert_contact or mon_update_contact:\n send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {\n 'user_mobile_no': user_mobile_no, 'insert_contact': str(\n mon_insert_contact), 'update_contact': str(mon_update_contact),\n 'user_id': application.user_id, 'name': application.user_name})\n\n\n<mask token>\n\n\ndef get_contact_from_mongo(number):\n if not number:\n return []\n query = TotalContact.objects(src_number=number, source__in=TotalContact\n .available()).order_by('source')\n lst = []\n for c in query:\n relation = TotalContact.relationship(c.source)\n if relation == -1:\n continue\n source = TotalContact.str_source(c.source)\n if not source:\n continue\n lst.append({'related_number': c.dest_number, 'source': source,\n 'is_calc': c.is_calc, 'total_count': c.total_count,\n 'total_duration': c.total_duration, 'relation': relation,\n 'name': c.dest_name})\n return lst\n\n\n<mask token>\n\n\ndef check_key_not_none(payload, keys):\n for key in keys:\n if payload.get(key) is None:\n logging.error('Missing args {}'.format(str(key)))\n return False\n return True\n\n\n@action(MessageAction.BILL_PAID)\ndef bill_paid(payload, msg_id):\n validate = check_key_not_none(payload, ['external_id', 'late_fee_part',\n 'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])\n if not validate:\n logging.error('payload key not fully pass in.')\n return\n external_id = payload['external_id']\n late_fee_part = Decimal(payload['late_fee_part'])\n principal_part = Decimal(payload['principal_part'])\n paid_at = payload['paid_at']\n partner_bill_id = payload['partner_bill_id']\n logging.debug(\n 'application %s paid principal part %s, paid late fee part %s',\n external_id, principal_part, late_fee_part)\n application = Application.filter(Application.external_id == external_id\n ).order_by(-Application.created_at).first()\n if not application:\n logging.info('application %s paid, not found application', external_id)\n return\n sub_bill_id = payload['bill_sub_id']\n overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==\n application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()\n if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not\n overdue_bill):\n logging.info('bill sub not in bomber %s', sub_bill_id)\n return\n with db.atomic():\n repay_at = str_no_utc_datetime(payload['latest_repay_at'])\n Application.update(repay_at=repay_at).where(Application.id ==\n application.id).execute()\n RepaymentLog.create(application=application.id, is_bombed=True,\n current_bomber=application.latest_bomber_id, cycle=application.\n cycle, principal_part=principal_part, late_fee_part=\n late_fee_part, repay_at=paid_at, ptp_bomber=application.\n ptp_bomber, latest_call=application.latest_call, periods=\n overdue_bill.periods if 
overdue_bill else None, overdue_bill_id\n =overdue_bill.id if overdue_bill else None, partner_bill_id=\n partner_bill_id)\n phone_status = PhoneStatus.CONNECTED.value\n real_relationship = RealRelationship.user_values()\n commit = CallActionCommit.NO.value\n number = CallActions.select(CallActions.number).where(CallActions.\n phone_status == phone_status, CallActions.real_relationship <<\n real_relationship, CallActions.commit == commit, CallActions.\n application == application.id).order_by(-CallActions.created_at\n ).first()\n if number:\n Contact.update(call_priority=PriorityStatus.REPAY.value).where(\n Contact.user_id == application.user_id, Contact.\n call_priority == PriorityStatus.LAST.value).execute()\n Contact.update(call_priority=PriorityStatus.LAST.value).where(\n Contact.user_id == application.user_id, Contact.number ==\n number.number).execute()\n if not application.latest_bomber_id:\n return\n Inbox.create(title='application %s,sub_bill_id %s repaid' % (\n application.external_id, sub_bill_id), content=\n 'application %s,sub_bill_id %s repaid' % (application.\n external_id, sub_bill_id), receiver=application.\n latest_bomber_id or application.last_bomber_id, category=\n InboxCategory.REPAID.value)\n\n\n@action(MessageAction.BILL_RELIEF)\ndef bill_relief(payload, msg_id):\n \"\"\"已废弃\"\"\"\n bill = payload['head_bill']\n repay_at = str_no_utc_datetime(bill['latest_repay_at'])\n updated_row = Application.update(repay_at=repay_at).where(Application.\n id == bill['external_id']).execute()\n logging.info('application %s bill relief done', bill['external_id'])\n return updated_row\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)\ndef calc_overdue_days_over(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query = Application.update(overdue_days=overdue_days).where(Application\n .status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.\n overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)\n updated_rows_count = query.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count)\n try:\n calc_overdue_days_over_instalment()\n except Exception as e:\n logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))\n apps = Application.filter(Application.status << [ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value], Application.overdue_days > 95, \n Application.promised_date.is_null(True) | (fn.DATE(Application.\n promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {\n 'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n\n\ndef calc_overdue_days_over_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n for status in sub_bill_status_list:\n query = OverdueBill.update(overdue_days=overdue_days).where(\n OverdueBill.status == status, OverdueBill.overdue_days > 95)\n 
updated_rows_count = query.execute()\n logging.info(\n 'calc_overdue_days_over_instalment done,count:%s,status:%s' % (\n updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.collection_id,\n OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on\n =OverdueBill.collection_id == Application.id).where(Application\n .status == status, Application.type == ApplicationType.\n CASH_LOAN_STAGING.value)\n app_update = {}\n for ob in overdue_bills:\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n<mask token>\n\n\ndef calc_overdue_days_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n today_now_time = datetime.now()\n month_first_day = today_now_time.replace(day=1, hour=1, minute=30,\n second=0, microsecond=0)\n for status in sub_bill_status_list:\n query = OverdueBill.update(overdue_days=overdue_days).where(\n OverdueBill.status == status, OverdueBill.overdue_days <= 95)\n updated_rows_count = query.execute()\n logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %\n (updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.\n created_at, OverdueBill.collection_id, OverdueBill.overdue_days\n ).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.\n collection_id == Application.id).where(Application.status ==\n status, Application.type == ApplicationType.CASH_LOAN_STAGING.value\n )\n app_update = {}\n for ob in overdue_bills:\n if (ob.status == ApplicationStatus.REPAID.value and ob.\n created_at < month_first_day):\n continue\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)\ndef automatic_escalation(payload, msg_id):\n app_ids = payload.get('application_list', [])\n if not app_ids:\n return\n apps = Application.select().where(Application.id.in_(app_ids), \n Application.status != ApplicationStatus.REPAID.value)\n for a in apps:\n new_cycle = application_entry_different_calculations(a)\n if a.overdue_days < 90:\n logging.info(\n 'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'\n .format(a.id, new_cycle, a.cycle, a.overdue_days))\n if new_cycle > a.cycle:\n with db.atomic():\n if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle\n .C1B.value):\n bomber_id = (a.latest_bomber_id if a.latest_bomber_id else\n a.cycle)\n DispatchAppHistory.update(out_at=datetime.now(),\n out_overdue_days=a.overdue_days).where(\n DispatchAppHistory.application == a.id, \n DispatchAppHistory.bomber_id == bomber_id).execute()\n Escalation.create(application=a.id, type=EscalationType.\n AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,\n 
current_cycle=a.cycle, escalate_to=new_cycle,\n current_bomber_id=a.latest_bomber)\n dis_app_update = DispatchApp.update(status=DisAppStatus.\n ABNORMAL.value).where(DispatchApp.application == a.id)\n dis_app_update.execute()\n a.cycle = new_cycle\n a.last_bomber = a.latest_bomber\n a.status = ApplicationStatus.UNCLAIMED.value\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n if new_cycle == Cycle.C1B.value:\n a.C1B_entry = datetime.now()\n elif new_cycle == Cycle.C2.value:\n a.C2_entry = datetime.now()\n elif new_cycle == Cycle.C3.value:\n a.C3_entry = datetime.now()\n a.save()\n logging.info('automatic escalation done')\n\n\ndef application_entry_different_calculations(app):\n conf = {(1): [1, 10], (2): [11, 30], (3): [31, 60], (4): [61, 90], (5):\n [91, 999999]}\n for new_cycle, scopes in conf.items():\n if scopes[0] <= app.overdue_days <= scopes[1]:\n return new_cycle\n return app.cycle\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY)\ndef cron_summary(payload, msg_id):\n \"\"\"已废弃\"\"\"\n employees = Bomber.select(Bomber, Role).join(Role)\n summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,\n 'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,\n 'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':\n 0, 'sms_sent': 0} for i in employees}\n now_date = date.today()\n cal_date = now_date - timedelta(days=1)\n claimed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('claimed')).where(fn.DATE(Application.\n claimed_at) == cal_date, Application.status << [ApplicationStatus.\n PROCESSING.value, ApplicationStatus.REPAID.value], Application.\n latest_bomber.is_null(False)).group_by(Application.latest_bomber)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False)).group_by(\n Application.latest_bomber)\n completed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('completed')).where(Application.\n latest_bombing_time.is_null(False), fn.DATE(Application.\n latest_bombing_time) == cal_date, Application.latest_bomber.is_null\n (False)).group_by(Application.latest_bomber)\n escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(\n Escalation.id).alias('escalated')).where(fn.DATE(Escalation.\n created_at) == cal_date, Escalation.type == EscalationType.\n AUTOMATIC.value, Escalation.current_bomber.is_null(False), \n Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation\n .current_bomber)\n transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).\n alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==\n cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(\n Transfer.operator)\n promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(\n BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.\n created_at) == cal_date, BombingHistory.result == BombingResult.\n HAS_PROGRESS.value).group_by(BombingHistory.bomber)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,\n RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.\n 
current_bomber)\n calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id\n ).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==\n cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)\n calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.\n record_id).alias('calls_connected')).where(fn.DATE(CallLog.\n time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==\n '1').group_by(CallLog.user_id)\n sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(\n ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_\n (ConnectType.sms()), ConnectHistory.created_at >= cal_date, \n ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)\n for i in claimed:\n summary[i.latest_bomber_id]['claimed'] += i.claimed\n for i in completed:\n summary[i.latest_bomber_id]['completed'] += i.completed\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n for i in escalated:\n summary[i.current_bomber_id]['escalated'] += i.escalated\n for i in transferred:\n summary[i.operator_id]['transferred'] += i.transferred\n for i in promised:\n summary[i.bomber_id]['promised'] += i.promised\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in calls_made:\n summary[int(i.user_id)]['calls_made'] += i.calls_made\n for i in calls_connected:\n summary[int(i.user_id)]['calls_connected'] += i.calls_connected\n for i in sms_sent:\n summary[i.operator_id]['sms_sent'] += i.sms_sent\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'claimed': data['claimed'], 'completed': data['completed'],\n 'cleared': data['cleared'], 'escalated': data['escalated'],\n 'transferred': data['transferred'], 'promised': data['promised'\n ], 'amount_recovered': data['amount_recovered'], 'calls_made':\n data['calls_made'], 'calls_connected': data['calls_connected'],\n 'sms_sent': data['sms_sent'], 'date': cal_date})\n if insert_args:\n Summary.insert_many(insert_args).execute()\n cycle_args = []\n escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(\n Escalation.id).alias('escalated_in')).where(Escalation.status ==\n ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==\n cal_date).group_by(Escalation.escalate_to)\n for i in escalated_in:\n cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.\n escalated_in, 'date': cal_date})\n amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM\n (RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)\n for i in amount_recovered_total:\n amount_recovered_total = i.principal_part + i.late_fee_part\n cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':\n amount_recovered_total, 'date': cal_date})\n if cycle_args:\n Summary.insert_many(cycle_args).execute()\n logging.info('cal summary done')\n send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY2)\ndef cron_summary2(payload, msg_id):\n \"\"\"已废弃,定时任务还在执行,具体情况待确定\"\"\"\n cal_date = date.today() - timedelta(days=1)\n employees = Bomber.select(Bomber, Role).join(Role)\n auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,\n AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')\n 
).where(fn.DATE(AutoCallActions.created_at) == cal_date)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.\n is_null(False), RepaymentLog.is_bombed == True)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False))\n auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,\n AutoCallActions.result)\n amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)\n cleared = cleared.group_by(Application.latest_bomber)\n summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,\n 'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered': \n 0} for e in employees}\n for a in auto_call_actions:\n summary[a.bomber_id]['answered_calls'] += a.count\n if a.result == AutoCallResult.PTP.value:\n summary[a.bomber_id]['ptp'] += a.count\n if a.result == AutoCallResult.FOLLOW_UP.value:\n summary[a.bomber_id]['follow_up'] += a.count\n if a.result == AutoCallResult.NOT_USEFUL.value:\n summary[a.bomber_id]['not_useful'] += a.count\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'answered_calls': data['answered_calls'], 'ptp': data['ptp'],\n 'follow_up': data['follow_up'], 'not_useful': data['not_useful'\n ], 'cleared': data['cleared'], 'amount_recovered': str(data[\n 'amount_recovered']), 'date': cal_date})\n if insert_args:\n Summary2.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_DISCOUNT_APPROVED)\ndef bomber_discount_approved(payload, msg_id):\n app_id = payload['id']\n msg_type = payload['msg_type']\n discount_to = payload['discount_to']\n effective_to = payload['effective_to']\n application = Application.filter(Application.id == app_id).first()\n if not application:\n logging.error(\n 'discount approved msg send failed application %s not found',\n app_id)\n return\n template = Template.select(Template.text, Template.app).where(Template.\n type == ConnectType.AUTO_SMS.value, Template.id << Template.\n get_auto_sms_tpl(msg_type), Template.app == application.app).first()\n if not template:\n logging.error('discount approved msg send failed template %s not found'\n , msg_type)\n return\n promised_date = None\n if application.promised_date:\n promised_date = application.promised_date.strftime('%d-%m-%Y')\n tpl_data = {'user_name': application.user_name, 'due_days': application\n .overdue_days, 'app_name': application.app, 'phone': application.\n user_mobile_no, 'cs_number': cs_number_conf.get(application.app,\n '02150202889'), 'promised_date': promised_date, 'discount_to':\n discount_to, 'effective_to': effective_to}\n content = template.text.format(**tpl_data)\n data_list = [{'receiver': '62' + application.user_mobile_no, 'content':\n content, 'title': ''}]\n send_sms(data_list, msg_type, application.app)\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST)\ndef 
bomber_auto_call_list(payload, msg_id):\n with db.atomic():\n bomber_dispatch_app()\n dispatch_instalment_app()\n dis_apps = DispatchApp.select(DispatchApp.application).where(\n DispatchApp.status == DisAppStatus.NORMAL.value)\n c1_apps = Application.select(Application.id, Application.cycle,\n Application.follow_up_date, Application.called_times).where(Application\n .status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.\n AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.\n is_rejected == False, Application.promised_date.is_null(True) | (fn\n .DATE(Application.promised_date) < datetime.today().date())).order_by(\n Application.overdue_days, Application.apply_at)\n dis_apps_ids = [da.application_id for da in dis_apps]\n insert_args = []\n for a in c1_apps:\n if a.id in dis_apps_ids:\n continue\n insert_args.append({'application': a.id, 'cycle': a.cycle,\n 'follow_up_date': a.follow_up_date, 'called_times': 1 if a.\n called_times else 0, 'description': 'init'})\n if not insert_args:\n logging.error('no application need auto call')\n with db.atomic():\n AutoCallList.delete().execute()\n for idx in range(0, len(insert_args), 100):\n AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()\n for idx in range(0, len(insert_args), 100):\n application_list = [i['application'] for i in insert_args[idx:idx +\n 100]]\n send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {\n 'application_list': application_list})\n logging.info('bomber generate auto call list finished')\n send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})\n\n\nclass ChangeBomberTool(object):\n\n @staticmethod\n def in_record(bomber_id, ids, bd):\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.\n alias('application_id'), R(str(bomber_id)).alias('bomber_id'),\n fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(\n 'expected_out_time'), Application.overdue_days.alias(\n 'entry_overdue_days')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << ids)\n Application.update(latest_bomber=bomber_id).where(Application.id.\n in_(ids)).execute()\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bd),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n @staticmethod\n def out_record(a, bd):\n _id = str(a.id)\n DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a\n .overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]\n .get('principal_paid')), out_late_fee_pending=bd[_id].get(\n 'late_fee') - bd[_id].get('late_fee_paid')).where(\n DispatchAppHistory.application == a.id, DispatchAppHistory.\n bomber_id == a.latest_bomber_id).execute()\n a.last_bomber = a.latest_bomber\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n a.save()\n\n @staticmethod\n def classify(l, b):\n if len(l) == 1:\n return l[0]\n _l = filter(lambda x: x['bomber'] != b, l)\n return min(_l, key=lambda x: len(x['ids']))\n\n\n<mask token>\n\n\ndef bomber_dispatch_app():\n try:\n c1a_dispatch_app()\n except Exception as e:\n logging.error('c1a_dispatch_app error:%s' % str(e))\n cycle = {(1): 10, (2): 30, (3): 60, (4): 90}\n apps = Application.select().where(fn.DATE(Application.C2_entry) == date\n .today(), Application.type == 
ApplicationType.CASH_LOAN.value)\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C2.value)\n apps_ids = [a.id for a in apps]\n dispatch_inserts = []\n start_index = 0\n apps_length = len(apps_ids)\n logging.warning('apps length %s' % str(apps_length))\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)\n gen = CycleIter([b.id for b in bombers])\n existing_list = []\n end_index = start_index + int(apps_length * p.app_percentage)\n logging.info('partner length %s' % str(end_index))\n if not apps_ids[start_index:end_index]:\n continue\n bills = BillService().bill_list(application_ids=apps_ids[\n start_index:end_index])\n bill_dict = {bill['application_id']: bill for bill in bills}\n for a_id in apps_ids[start_index:end_index]:\n bomber = average_gen(gen, existing_list)\n q = DispatchApp.delete().where(DispatchApp.application == a_id\n ).execute()\n dispatch_inserts.append({'application': a_id, 'bomber': bomber,\n 'partner': p.id})\n application = Application.select().where(Application.id == a_id\n ).first()\n application.latest_bomber = bomber\n application.status = ApplicationStatus.AB_TEST.value\n application.ptp_bomber = None\n application.save()\n day_next_cycle = cycle.get(application.cycle\n ) - application.overdue_days\n DispatchAppHistory.create(application=a_id, partner_id=p.id,\n bomber_id=bomber, entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - Decimal(\n bill_dict[a_id].get('principal_paid')),\n entry_late_fee_pending=Decimal(bill_dict[a_id].get(\n 'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')\n ), expected_out_time=date.today() + timedelta(days=\n day_next_cycle))\n start_index = end_index\n with db.atomic():\n for idx in range(0, len(dispatch_inserts), 100):\n DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n config = SystemConfig.prefetch(SCI.AB_TEST_C2)\n c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)\n c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)\n c2 = apps_ids[start_index:]\n if c2:\n bills = BillService().bill_list(application_ids=c2)\n else:\n bills = []\n bill_dict = {bill['application_id']: bill for bill in bills}\n logging.info('c2 AB_test length: %s' % str(c2))\n gen = CycleIter(c2_bomber)\n existing_list = []\n for c in c2:\n bomber = average_gen(gen, existing_list)\n application = Application.filter(Application.id == c).first()\n application.status = ApplicationStatus.AB_TEST.value\n application.latest_bomber = bomber\n application.ptp_bomber = None\n application.save()\n day_next_cycle = 46 - application.overdue_days\n DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at\n =datetime.now(), entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - bill_dict[c].get(\n 'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(\n 'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),\n expected_out_time=date.today() + timedelta(days=day_next_cycle))\n ab_test_other()\n\n\ndef c1a_dispatch_app():\n today = datetime.today().date()\n tomorrow = today + timedelta(days=1)\n c1a_apps = Application.select().where(Application.status << [\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value], Application.dpd1_entry >= today, Application.dpd1_entry <\n tomorrow, Application.type == ApplicationType.CASH_LOAN.value)\n 
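    # Editorial note (assumption, not taken from the original source):
    # get_average_number(total, n) is read here as returning n nearly-equal
    # share sizes that sum to total, e.g. get_average_number(10, 3) -> [4, 3, 3].
    # The loop below first slices today's dpd1 applications by each partner's
    # app_percentage, then walks that partner's bombers and assigns each one a
    # contiguous slice of aids sized by those shares.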
all_aids = [a.id for a in c1a_apps]\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C1A.value)\n end = 0\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n is_del == 0)\n start = end\n end += int(len(all_aids) * p.app_percentage)\n aids = all_aids[start:end]\n bids = [b.id for b in bombers]\n if not bids or not aids:\n continue\n average_number = get_average_number(len(aids), len(bids))\n p_end = 0\n for i, bid in enumerate(bids):\n p_start = p_end\n p_end += average_number[i]\n b_aids = aids[p_start:p_end]\n with db.atomic():\n q = Application.update(latest_bomber=bid, status=\n ApplicationStatus.AB_TEST.value).where(Application.id <<\n b_aids).execute()\n params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,\n 'application_ids': b_aids, 'dest_bomber_id': bid}\n new_in_record(**params)\n try:\n dispatch_inserts = []\n for aid in b_aids:\n dispatch_inserts.append({'application': aid, 'bomber':\n bid, 'partner': p.id, 'status': DisAppStatus.NORMAL\n .value})\n if dispatch_inserts:\n q = DispatchApp.insert_many(dispatch_inserts).execute()\n except Exception as e:\n logging.error('c1a分件写入dispatch_app error:%s' % str(e))\n\n\n<mask token>\n\n\ndef get_cash_bomber(bids, cycle):\n cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==\n 0, Bomber.instalment != cycle)\n cash_bids = [b.id for b in cash_bombers]\n return cash_bids\n\n\n<mask token>\n\n\ndef out_and_in_record_instalment(**kwargs):\n if not kwargs.get('application_ids'):\n return\n out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(\n DispatchAppHistory.application << kwargs['application_ids'],\n DispatchAppHistory.out_at.is_null(True)).execute()\n cycle_period = {(1): '10', (2): '30', (3): '60', (4): '90'}\n period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')\n kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(kwargs['dest_bomber_id'])).alias(\n 'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.\n alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).\n alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << kwargs[\n 'application_ids'])\n application_list = list(subquery)\n for idx in range(0, len(application_list), 50):\n applications = application_list[idx:idx + 50]\n app_ids = [i.application_id for i in applications]\n overdue_bills = OverdueBill.select().where(OverdueBill.\n collection_id << app_ids)\n sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]\n bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)\n insert_args = lambad_instalment_result(bill_list, applications)\n if not insert_args:\n continue\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef c1b_dispatch_in_record(**kwargs):\n app_ids = kwargs.get('apps')\n partner_id = kwargs.get('partner_id', 'null')\n bill_dict = kwargs.get('bill_dict')\n period = kwargs.get('period')\n bomber_id = kwargs.get('bomber_id')\n if not all([app_ids, partner_id, bill_dict, period]):\n return False\n bill_dict = {str(k): v for k, v in bill_dict.items()}\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), 
fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().\n alias('entry_at'), Application.overdue_days.alias(\n 'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.id << app_ids)\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef check_call_history(application):\n app_create_at = application.created_at + timedelta(days=4)\n if datetime.today().date() > app_create_at.date():\n call_actions = CallActions.select().where(CallActions.type == 0, \n CallActions.application == application.id, CallActions.\n created_at > datetime.now() - timedelta(days=5))\n for call in call_actions:\n if call.phone_status == PhoneStatus.CONNECTED.value:\n return True\n return False\n return True\n\n\n@action(MessageAction.BOMBER_SCAVENGER)\ndef scavenger(payload, msg_id):\n scavenger_time = -60\n scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'SCAVENGER_TIME').first()\n if scavenger and scavenger.value.isdigit():\n scavenger_time = -int(scavenger.value)\n update_auto_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value, description='scavenger').where(AutoCallList.status ==\n AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime\n .now() + timedelta(minutes=scavenger_time))\n count = update_auto_call_list.execute()\n logging.info('scavenger processed %s application', count)\n mail_box_scavenger_time = -30\n mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'MAIL_BOX_SCAVENGER_TIME').first()\n if mail_box_scavenger and mail_box_scavenger.value.isdigit():\n mail_box_scavenger_time = -int(mail_box_scavenger.value)\n update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.\n value, AutoCallList.updated_at < datetime.now() + timedelta(minutes\n =mail_box_scavenger_time))\n mail_box_count = update_mail_box_call_list.execute()\n logging.info('scavenger update mail box %s', mail_box_count)\n update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value\n ).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.\n updated_at < datetime.now() + timedelta(minutes=-30))\n ivr_result = update_auto_ivr.execute()\n logging.info('scavenger update %s ivr' % ivr_result)\n\n\n<mask token>\n\n\n@action(MessageAction.REPORT_BOMBER_COLLECTION)\ndef report_bomber_collection(payload, msg_id):\n start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)\n ).scalar()\n now = datetime.now()\n if start_date and str(start_date) == str(now)[:10]:\n return\n end_date = str(now + timedelta(days=1))[:10]\n start_date = str(now)[:10]\n dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))\n all_overdue_loan_sql1 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n GROUP BY 1;\n \"\"\"\n s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()\n d1 = OperatedDict(s_data1)\n all_overdue_loan_sql2 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE DATE(ba.follow_up_date) > CURDATE()\n AND ba.called_counts = 0\n GROUP BY 1;\n \"\"\"\n s_data2 = 
readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()\n d2 = OperatedDict(s_data2)\n overdue_loans_entered_into_predict_call_system_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE ba.called_counts >= 1\n GROUP BY 1;\n \"\"\"\n s_data3 = readonly_db.execute_sql(\n overdue_loans_entered_into_predict_call_system_sql).fetchall()\n d3 = OperatedDict(s_data3)\n loans_completed_sql = \"\"\"\n SELECT ba.cycle, COUNT(DISTINCT ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()\n d4 = OperatedDict(s_data4)\n connected_calls_automatic_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()\n d5 = OperatedDict(s_data5)\n connected_calls_manual_sql = \"\"\"\n SELECT bb.cycle, COUNT(bb.id)\n FROM bomber.bombing_history bb\n WHERE DATE(bb.created_at) = curdate()\n AND (bb.bomber_id < 150 OR bb.bomber_id > 200)\n GROUP BY bb.cycle;\n \"\"\"\n s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()\n d6 = OperatedDict(s_data6)\n logging.info('Directly get data from database successfully.')\n c1 = d1 - d2\n c2 = d3\n c3 = c2 / c1\n c4 = d4\n c5 = c4 / c2\n c6 = d5\n c7 = c6 / c4\n c8 = d6\n c9 = OperatedDict(get_agent())\n c10 = (c6 + c8) / c9\n try:\n c11 = average_call_duration_team(start_date, end_date)\n except AttributeError:\n c11 = {}\n lst = []\n for i in range(1, 5):\n lst.append({'apply_date': start_date, 'cycle': dct[i],\n 'all_overdue_loan': c1.get(i, 0),\n 'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),\n 'of_overdue_loans_entered_into_predict_call_system': round(c3.\n get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),\n 'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *\n 100, 1), 'connected_calls_automatic': c6.get(i, 0),\n 'connected_calls_automatic_completed_loans': round(c7.get(i, 0),\n 1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i, \n 0), 'average_calls_agent': round(c10.get(i, 0), 1),\n 'average_call_duration_team': round(c11.get(i, 0), 1)})\n ReportCollection.insert_many(lst).execute()\n logging.info('report_bomber_collection:Done!')\n\n\n<mask token>\n\n\ndef new_out_record(**kwargs):\n if not kwargs['application_ids']:\n return\n DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.\n bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.\n application << kwargs['application_ids'], DispatchAppHistory.out_at\n .is_null(True)).execute()\n if kwargs.get('month_dispatch'):\n return\n try:\n Application.update(ptp_bomber=None).where(Application.id << kwargs[\n 'application_ids']).execute()\n except Exception as e:\n logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[\n 'application_ids'], str(e)))\n\n\n<mask token>\n\n\ndef end_old_application(old_app, paid=False):\n if paid:\n if old_app.status == OldLoanStatus.WAITING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return\n if old_app.status == OldLoanStatus.PROCESSING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return old_app.application_id\n end_date = old_app.end_date\n now = datetime.now()\n if now >= max(end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n old_app.save()\n return 
old_app.application_id\n\n\n<mask token>\n\n\ndef start_old_application(old_app, cancel=False):\n application_id = old_app.application_id\n if cancel and old_app.status == OldLoanStatus.PAID.value:\n now = datetime.now()\n if old_app.start_date is None:\n old_app.status = OldLoanStatus.WAITING.value\n elif now >= max(old_app.end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.\n promised_date or now)).where(DispatchAppHistory.bomber_id ==\n old_app.bomber_id, DispatchAppHistory.application ==\n application_id).execute()\n else:\n old_app.status = OldLoanStatus.PROCESSING.value\n DispatchAppHistory.update(out_at=None).where(DispatchAppHistory\n .bomber_id == old_app.bomber_id, DispatchAppHistory.\n application == application_id).execute()\n old_app.save()\n return\n application = Application.get_or_none(Application.id == application_id,\n Application.status != ApplicationStatus.REPAID.value, Application.\n overdue_days > 90, Application.promised_date.is_null(True) | (fn.\n DATE(Application.promised_date) < datetime.today().date()))\n if not application:\n logging.error('Can not set old application %s to start collecting',\n application_id)\n return\n if old_app.status in OldLoanStatus.no_available():\n logging.info('%s has finished or paid', old_app.application_id)\n return\n config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)\n sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)\n old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value\n old_app.status = OldLoanStatus.PROCESSING.value\n old_app.bomber_id = old_app_bomber\n old_app.start_date = datetime.now()\n if not old_app.end_date:\n old_app.end_date = datetime.now() + timedelta(days=sp)\n old_app.save()\n in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,\n application_ids=[old_app.application_id], expected_out_time=str(\n old_app.end_date))\n\n\n<mask token>\n\n\ndef run_member_sql(sql):\n result = [0, 0]\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n sql_result = cursor.fetchone()\n if sql_result:\n result = sql_result\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n return result\n\n\n<mask token>\n\n\ndef get_before_bomber(date_time):\n begin_time = str(date_time - timedelta(days=7))\n end_time = str(date_time)\n old_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at<date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (begin_time, begin_time))\n old_data = run_one_sql(old_sql)\n new_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at> '%s' \n and created_at<'%s'\n and overdue_days=1;\n \"\"\"\n % (begin_time, end_time))\n new_data = run_one_sql(new_sql)\n dpd4_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>date_add('%s',interval 1 day) \n and created_at< date_add('%s',interval 1 day)\n and overdue_days=4;\n \"\"\"\n % (begin_time, end_time))\n dpd4_data = run_one_sql(dpd4_sql)\n dpd2_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at< date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % 
(end_time, end_time))\n dpd2_data = run_one_sql(dpd2_sql)\n all_money = old_data + new_data\n repayment = all_money - dpd4_data - dpd2_data\n pro = 0\n if all_money:\n pro = repayment / all_money * 100\n RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,\n proportion=pro, repayment=repayment)\n\n\n<mask token>\n\n\ndef get_c1a_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=19)\n repayment_sql = (\n \"\"\"\n select \n sum(b.principal_part+b.late_fee_part) as paid_amount, \n cdt\n from \n (select \n br.principal_part, br.late_fee_part, \n date(cdt) as cdt, br.repay_at, br.application_id\n from (\n select ba.id, ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n if not repayment:\n return\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n for d in repayment:\n repay = d[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n d[1], RepaymentReportInto.cycle == Cycle.C1A.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = round(repay, 3)\n pro = repay / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_c2_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n not_contain_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and 
ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_money = run_one_sql(not_contain_sql)\n begin_date = date_time - timedelta(days=37)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n not_contain_repay_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_repay = run_all_sql(not_contain_repay_sql)\n if not not_contain_money and repayment:\n return\n for i in ContainOut.values():\n if i == ContainOut.NOT_CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(\n not_contain_money, 3), proportion='0', repayment=0,\n is_first_loan=is_first_loan, contain_out=ContainOut.\n NOT_CONTAIN.value)\n for repay in not_contain_repay:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n elif i == ContainOut.CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(all_money, 3),\n proportion='0', repayment=0, is_first_loan=\n is_first_loan, contain_out=ContainOut.CONTAIN.value)\n for repay in repayment:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\ndef 
get_c3_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id, ba.c3_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=30)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c3_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c3_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = '%s'\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 4\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C3.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n if not repayment:\n return\n for repay in repayment:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n repay[1], RepaymentReportInto.cycle == Cycle.C3.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = repay_money\n pro = 0\n if report.all_money and int(report.all_money):\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_static_bomber(begin_date):\n active_date = begin_date - timedelta(days=8)\n bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),\n BomberR.last_active_at.alias('active')).where(BomberR.\n last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])\n summary = []\n for bomber in bombers:\n summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':\n bomber.role.cycle, 'work_ind': 0})\n SummaryBomber.insert_many(summary).execute()\n\n\n<mask token>\n\n\n@time_logger\ndef get_call_and_made(end_date, begin_date, real_time_query=False):\n call_sql = (\n \"\"\"\n select \n bomber_id, \n count(case when relationship is not null then application_id end) \n as 'call_cnt', \n count(distinct case when relationship is not null then \n application_id end) as 'call_case',\n count(case when phone_status=4 then application_id end) as 'connect',\n count(distinct case when phone_status=4 then application_id end) \n as 'connect_case'\n from (\n select bomber_id,application_id,phone_status, cycle, relationship\n from bomber.call_actions ba\n where created_at>'%s' and created_at<'%s'\n and type in (0, 1)\n ) a\n group by 1\n \"\"\"\n % (begin_date, end_date))\n calls = run_all_sql(call_sql)\n if real_time_query:\n return calls\n for call in calls:\n bomber, call_cnt, case_made, 
connect_cnt, case_connect = call\n SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,\n call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(\n SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date\n ).execute()\n return calls\n\n\n@time_logger\ndef get_claimed_cnt(end_date, begin_date, real_time_query=False):\n table_date = begin_date - timedelta(days=30)\n claimed_sql = (\n \"\"\"\n SELECT\n COUNT( `t1`.`application_id` ) AS cnt,\n `t1`.`bomber_id` AS bomber_id \n FROM\n `dispatch_app_history` AS t1 \n WHERE\n ( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null ) \n AND ( `t1`.`bomber_id` != 1000 ) \n AND ( `t1`.`partner_id` IS null ) \n AND ( `t1`.`entry_at` > '%s' ) \n AND ( `t1`.`entry_at` < '%s' ) \n GROUP BY\n `t1`.`bomber_id`\n \"\"\"\n % (begin_date, table_date, end_date))\n claimeds = run_all_sql(claimed_sql)\n if real_time_query:\n return claimeds\n for claimed in claimeds:\n cnt, bomber_id = claimed\n SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == bomber_id).execute()\n return claimeds\n\n\ndef get_sms_data(end_data, begin_data):\n all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(\n 'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')\n ).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.\n created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())\n ).group_by(ConnectHistoryR.operator)\n for sms in all_sms:\n SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==\n begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()\n return all_sms\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_NEW)\ndef summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_active_bomber(begin_date)\n get_call_and_made(end_date, begin_date)\n get_claimed_cnt(end_date, begin_date)\n get_sms_data(end_date, begin_date)\n get_ptp_data(end_date, begin_date)\n get_recover_amount(end_date, begin_date)\n get_unfollowed(begin_date)\n get_unfollowed_call(begin_date)\n\n\ndef get_new_case_amount(begin_date, end_date):\n all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.\n entry_late_fee_pending + DispatchAppHistoryR.\n entry_principal_pending).alias('pending'), DispatchAppHistoryR.\n bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')\n ).where(DispatchAppHistoryR.entry_at > begin_date, \n DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.\n partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)\n for case in all_case:\n SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt\n =case.cnt).where(SummaryBomber.bomber_id == case.bomber_id, \n SummaryBomber.time == begin_date).execute()\n return all_case\n\n\n<mask token>\n\n\ndef get_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n 
ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c2_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c2_entry > '%s' \n AND ba.c2_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c3_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c3_entry > '%s' \n AND ba.c3_entry < '%s' \n ) a\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date, end_date))\n case_cleared_sums = run_all_sql(sql)\n for clear in case_cleared_sums:\n SummaryBomber.update(new_case_cleared_sum=clear[1]).where(\n SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==\n clear[0]).execute()\n\n\n@time_logger\ndef get_new_case_call(begin_date, end_date, real_query_time=False):\n sql = (\n \"\"\"\n SELECT\n bd.bomber_id,\n count( DISTINCT bd.application_id )\n FROM\n bomber.dispatch_app_history bd\n INNER JOIN bomber.call_actions bc \n ON bd.application_id = bc.application_id \n AND bd.bomber_id = bc.bomber_id \n AND date( bd.entry_at ) = date( bc.created_at ) \n WHERE\n entry_at > '%s' \n AND entry_at < '%s' \n AND partner_id IS NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n new_case_calls = run_all_sql(sql)\n if real_query_time:\n return new_case_calls\n for call in new_case_calls:\n SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber\n .bomber_id == call[0], SummaryBomber.time == begin_date).execute()\n return new_case_calls\n\n\n@time_logger\ndef get_calltime_avg(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction,\n count( 1 ) AS auto_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' ' \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n AND an.recording is not null\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]][0] += data[1]\n result[data[0]][1] += data[2]\n continue\n result[data[0]] = [data[1], data[2]]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=\n value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == key).execute()\n return result\n\n\ndef get_no_calltime_avg(begin_date, end_date):\n manual_sql = (\n \"\"\"\n 
SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND (an.status!='ANSWERED' or an.recording is null) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n for data in manuals:\n SummaryBomber.update(calltime_no_case_sum=data[1],\n calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /\n data[2] if data[2] else 0).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == data[0]).execute()\n\n\n@time_logger\ndef get_calltime_sum(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' '\n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == key).execute()\n return result\n\n\n<mask token>\n\n\n@action(MessageAction.UPDATE_SUMMARY_NEW)\ndef update_summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'UPDATE_SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_new_case_amount(begin_date, end_date)\n get_kp_cleared(begin_date, end_date)\n get_kp_today(begin_date, end_date)\n get_ptp_cnt(begin_date, end_date)\n get_ptp_call_cnt(begin_date, end_date)\n get_new_case_cleared(begin_date, end_date)\n get_new_case_call(begin_date, end_date)\n get_calltime_avg(begin_date, end_date)\n get_no_calltime_avg(begin_date, end_date)\n get_calltime_sum(begin_date, end_date)\n\n\ndef get_cycle_claimed(begin_date, end_date):\n sql = (\n \"\"\"\n select cycle,count(1)\n from bomber.application where cycle in (1,2,3,4)\n and (finished_at is null or (finished_at>'%s'))\n and created_at>'2018-09-01'\n group by 1\n \"\"\"\n % begin_date)\n result = run_all_sql(sql)\n return result\n\n\n@time_logger\ndef cycle_new_case(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( ba.id ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.created_at ) = bo.which_day_overdue \n WHERE\n ba.created_at > '%s' \n AND 
ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c1b_entry ) = bo.which_day_overdue\n WHERE\n c1b_entry > '%s' \n AND c1b_entry < '%s' \n UNION\n SELECT\n 3 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c2_entry ) = bo.which_day_overdue \n WHERE\n c2_entry > '%s' \n AND c2_entry < '%s' \n UNION\n SELECT\n 4 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c3_entry ) = bo.which_day_overdue\n WHERE\n c3_entry > '%s' \n AND c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n all_datas = run_all_sql(sql)\n if real_time_query:\n return all_datas\n for data in all_datas:\n SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()\n return all_datas\n\n\n@time_logger\ndef get_cycle_new_case_call(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.created_at ) = date( bc.created_at ) \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c1b_entry ) = date( bc.created_at ) \n WHERE\n ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s'\n UNION\n SELECT\n 3 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c2_entry ) = date( bc.created_at ) \n WHERE\n ba.c2_entry > '%s' \n AND ba.c2_entry < '%s'\n UNION\n SELECT\n 4 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c3_entry ) = date( bc.created_at ) \n WHERE\n ba.c3_entry > '%s' \n AND ba.c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n cycle_datas = run_all_sql(sql)\n if real_time_query:\n return cycle_datas\n for data in cycle_datas:\n SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber\n .time == begin_date, SummaryBomber.cycle == data[0], \n SummaryBomber.bomber_id == data[0]).execute()\n return cycle_datas\n\n\ndef get_cycle_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n '1' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.created_at ) = date( br.repay_at ) \n WHERE ba.created_at > '%s' \n AND ba.created_at < '%s' \n GROUP BY 1, 2 ) a \n UNION\n SELECT \n '2' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n 
(SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n GROUP BY 1, 2) a\n \"\"\"\n % (begin_date, end_date, begin_date, end_date))\n cycle_cleared = run_all_sql(sql)\n for i in cycle_cleared:\n SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber\n .cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.\n time == begin_date).execute()\n\n\n<mask token>\n\n\ndef get_change_bomber():\n cycle_role_map = {(5): Cycle.C1B.value, (6): Cycle.C2.value, (8): Cycle\n .C3.value}\n result = {}\n bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,\n BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=\n BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.\n created_at) == date.today(), BomberLog.role_id << list(\n cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.\n instalment == 0).dicts()\n for b_log in bomber_logs:\n cycle = cycle_role_map.get(b_log['role_id'])\n group_id = b_log['group_id']\n if cycle in result:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}\n else:\n result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}}\n if b_log['operation'] == 0:\n result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])\n if result:\n bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map\n .keys())), Bomber.is_del == 0, Bomber.instalment == 0)\n for b in bombers:\n cycle_result = result.get(cycle_role_map[b.role_id], {})\n role_result = cycle_result.get(b.group_id)\n if not role_result:\n continue\n role_result['new_ids'].append(b.id)\n resutl_list = []\n for cycle, group_dict in result.items():\n resutl_list.extend(list(group_dict.values()))\n return resutl_list\n return []\n\n\n<mask token>\n\n\ndef get_surplus_application(new_ids, del_ids, average_nums, classified_apps):\n surplus_apps = []\n for del_id in del_ids:\n del_res = classified_apps.get(del_id, {})\n p_list = del_res.get('p_list', [])\n np_list = del_res.get('np_list', [])\n del_res['need_num'] = -(len(p_list) + len(np_list))\n del_res['to_list'] = np_list + p_list\n surplus_apps.extend(p_list)\n surplus_apps.extend(np_list)\n for index, bid in enumerate(new_ids):\n average = average_nums[index]\n bomber_app = classified_apps.get(bid)\n if not bomber_app:\n bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid\n ).first()\n bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':\n [], 'np_num': 0, 'need_num': average, 'partner_id': bomber.\n partner_id if bomber else ''}\n classified_apps[bid] = bomber_app\n else:\n p_num = len(bomber_app['p_list'])\n np_num = len(bomber_app['np_list'])\n if p_num > average:\n bomber_app['need_num'] = -np_num\n else:\n bomber_app['need_num'] = average - (p_num + np_num)\n bomber_app['p_num'] = p_num\n bomber_app['np_num'] = np_num\n if bomber_app['need_num'] < 0:\n random.shuffle(bomber_app['np_list'])\n res_over = bomber_app['np_list'][:-bomber_app['need_num']]\n bomber_app['to_list'] = res_over\n surplus_apps.extend(res_over)\n classified_apps_list = sorted(classified_apps.values(), key=lambda x: x\n ['need_num'], reverse=True)\n return surplus_apps, classified_apps_list\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)\ndef bomber_dispatch_applications(payload, 
msg_id):\n change_bombers = get_change_bomber()\n instalment_change_bombers = get_instalment_change_bomber()\n params = {ApplicationType.CASH_LOAN.value: change_bombers,\n ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}\n for type, bombers in params.items():\n change_bomber_dispatch_apps(change_bombers=bombers, type=type)\n\n\n<mask token>\n\n\ndef get_summary_daily_time():\n mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')\n mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')\n now_date = datetime.now()\n now_date_time = now_date.time()\n today_str = str(now_date.date())\n if now_date_time < mid_time_t1.time():\n yes_date = now_date - timedelta(days=1)\n yes_date_str = str(yes_date.date())\n begin_str = yes_date_str + ' 17:20:00'\n end_str = today_str + ' 00:00:00'\n elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():\n begin_str = today_str + ' 00:00:00'\n end_str = today_str + ' 12:40:00'\n else:\n begin_str = today_str + ' 12:40:00'\n end_str = today_str + ' 17:20:00'\n begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')\n end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')\n summary_datetime = now_date - timedelta(minutes=30)\n summary_date = summary_datetime.date()\n return begin_time, end_time, summary_date\n\n\n@action(MessageAction.SUMMARY_DAILY)\ndef summary_daily_data(payload, msg_id):\n begin_time, end_time, summary_date = get_summary_daily_time()\n call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.\n bomber_id, CallActionsR.application_id, CallActionsR.promised_date,\n CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(\n CallActionsR.created_at >= begin_time, CallActionsR.created_at <\n end_time, CallActionsR.type << (0, 1))\n summary_dailys = {}\n for call in call_actions:\n if call.bomber_id not in summary_dailys:\n summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.\n bomber_id, 'summary_date': str(summary_date)}\n if call.name and call.number:\n summary_dailys[call.bomber_id]['call_cnt'] += 1\n if call.promised_date:\n summary_dailys[call.bomber_id]['ptp_cnt'] += 1\n C1_sql = (\n \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,a.cycle\n from \n (select a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,2,4,5)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\"\n % (begin_time, end_time))\n C1_repayment = run_all_sql(C1_sql)\n other_sql = (\n \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,cycle\n from (\n select application_id,current_bomber_id,pay_amount,repay_at,cycle\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at,\n br.cycle\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (3,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n 
group by 1,4) b\n group by 1\n \"\"\"\n % (begin_time, end_time))\n other_repayment = run_all_sql(other_sql)\n all_repayment = C1_repayment + other_repayment\n for res in all_repayment:\n bomber_id, pay_amount, cycle = res\n if bomber_id in summary_dailys:\n summary_dailys[bomber_id]['repayment'] += pay_amount\n else:\n summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': cycle, 'repayment': pay_amount, 'bomber_id':\n bomber_id, 'summary_date': str(summary_date)}\n insert_values = list(summary_dailys.values())\n if insert_values:\n SummaryDaily.insert_many(insert_values).execute()\n\n\n<mask token>\n\n\ndef get_app_logs(apps):\n app_logs = {}\n all_np_apps = []\n all_p_apps = []\n for a in apps:\n latest_bomber = a['latest_bomber']\n latest_bomber = a['cycle'] if not latest_bomber else latest_bomber\n if latest_bomber in app_logs:\n app_logs[latest_bomber]['to_ids'].append(a['id'])\n else:\n app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':\n [a['id']], 'np_ids': [], 'p_ids': []}\n if a['promised_date'] and a['promised_date'].date() >= datetime.now(\n ).date():\n app_logs[latest_bomber]['p_ids'].append(a['id'])\n all_p_apps.append(a)\n else:\n app_logs[latest_bomber]['np_ids'].append(a['id'])\n all_np_apps.append(a)\n return app_logs, all_np_apps, all_p_apps\n\n\ndef month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):\n apps = list(apps)\n np_apps = list(np_apps)\n random.shuffle(np_apps)\n apps_len = len(apps)\n np_apps_len = len(np_apps)\n end = 0\n all_app_precentage = 0\n partners = Partner.select().where(Partner.cycle == cycle, Partner.\n status == PartnerStatus.NORMAL.value)\n for p in partners:\n all_app_precentage += p.app_percentage\n for partner in partners:\n bombers = Bomber.select().where(Bomber.partner == partner.id, \n Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.\n value)\n bids = {b.id: b for b in bombers}\n if len(bids) == 0:\n logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))\n continue\n start = end\n if np_apps_len >= int(apps_len * all_app_precentage):\n end = start + int(apps_len * partner.app_percentage)\n else:\n end = start + int(np_apps_len * partner.app_percentage /\n all_app_precentage)\n partner_app = np_apps[start:end]\n dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)\n np_apps = np_apps[end:]\n return np_apps\n\n\ndef month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):\n sys_cycle = {(1): 'AB_TEST_C1A', (2): 'AB_TEST_C1B', (3): 'AB_TEST_C2',\n (4): 'AB_TEST_C3'}\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0\n )\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n bombers = bombers.where(Bomber.instalment == 0)\n bids = {b.id: b for b in bombers}\n if cycle == Cycle.C1A.value:\n np_ids = [a['id'] for a in np_apps]\n np = Application.update(status=ApplicationStatus.PROCESSING.value,\n ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids\n ).execute()\n bomber_app_logs = app_logs.get(cycle, {})\n out_param = {'application_ids': bomber_app_logs.get('to_ids', []),\n 'month_dispatch': 1, 'src_bomber_id': cycle}\n new_out_record(**out_param)\n in_param = {'cycle': cycle, 'application_ids': np_ids,\n 'dest_bomber_id': cycle}\n new_in_record(**in_param)\n bomber_app_logs['need_num'] = len(np_apps)\n bomber_app_logs['form_ids'] = np_ids\n bomber_app_logs['status'] = 1\n else:\n 
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)\n dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)\n\n\ndef dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,\n type=ApplicationType.CASH_LOAN.value):\n apps = list(apps)\n random.shuffle(apps)\n bids_list = list(bids.keys())\n if len(bids_list) <= 0:\n logging.info('get_dispatch_app_to_bomber no bids')\n return\n average_num = get_average_number(len(apps), len(bids_list))\n bomber_end = 0\n with db.atomic():\n for index, bid in enumerate(bids_list):\n current_bomber = bids.get(bid)\n bomber_app_logs = app_logs.get(bid, {})\n bomber_start = bomber_end\n bomber_end = bomber_start + average_num[index]\n bomber_apps = apps[bomber_start:bomber_end]\n from_p, from_np, from_ids, status = [], [], [], 0\n for ba in bomber_apps:\n promised_date = ba.get('promised_date')\n from_ids.append(ba['id'])\n if promised_date and promised_date.date() >= date.today():\n from_p.append(ba['id'])\n else:\n from_np.append(ba['id'])\n app_status = ApplicationStatus.AB_TEST.value\n if (cycle == Cycle.C1A.value and not out_partner and type ==\n ApplicationType.CASH_LOAN.value):\n app_status = ApplicationStatus.PROCESSING.value\n if from_p:\n p = Application.update(ptp_bomber=bid, latest_bomber=bid,\n status=app_status).where(Application.id << from_p).execute(\n )\n p_ids = bomber_app_logs.get('p_ids', []) + from_p\n bomber_app_logs['p_ids'] = p_ids\n if from_np:\n np = Application.update(latest_bomber=bid, ptp_bomber=None,\n status=ApplicationStatus.AB_TEST.value).where(\n Application.id << from_np).execute()\n np_ids = bomber_app_logs.get('np_ids', []) + from_np\n bomber_app_logs['np_ids'] = np_ids\n in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.\n partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}\n if type == ApplicationType.CASH_LOAN.value:\n out_param = {'src_bomber_id': bid, 'application_ids':\n bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}\n new_out_record(**out_param)\n new_in_record(**in_param)\n else:\n out_and_in_record_instalment(**in_param)\n bomber_app_logs['status'] = 1\n need_num = bomber_app_logs.get('need_num', 0) + average_num[index]\n bomber_app_logs['need_num'] = need_num\n all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids\n bomber_app_logs['form_ids'] = all_form_ids\n if not out_partner:\n continue\n try:\n DispatchApp.delete().where(DispatchApp.application.in_(\n from_ids)).execute()\n dispatch_ins = [{'application': id, 'partner':\n current_bomber.partner_id, 'bomber': bid, 'status':\n DisAppStatus.NORMAL.value} for id in from_ids]\n DispatchApp.insert_many(dispatch_ins).execute()\n except Exception as e:\n logging.info(\n 'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str\n (e), bid, from_ids))\n\n\n<mask token>\n\n\ndef calc_entry_time(overdue_days):\n app_entry_time = {}\n overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],\n 'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}\n for key, value in overdue_entry.items():\n if value[0] <= overdue_days <= value[1]:\n app_entry_time[key] = datetime.now()\n else:\n app_entry_time[key] = None\n return app_entry_time\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_BOMBER_OVERDUE)\ndef summary_bomber_overdue_everyday(payload, msg_id):\n cycle_list = Cycle.values()\n which_day = date.today()\n for cycle in cycle_list:\n apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,\n ApplicationR.ptp_bomber, ApplicationR.overdue_days,\n ApplicationR.promised_date, 
ApplicationR.follow_up_date,\n ApplicationR.external_id, OverdueBillR.status, OverdueBillR.\n periods, OverdueBillR.sub_bill_id).join(OverdueBillR,\n JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id\n ).where(ApplicationR.status != ApplicationStatus.REPAID.value, \n ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()\n bomber_overdue_list = []\n for app in apps:\n status = app.get('status')\n if status == ApplicationStatus.REPAID.value:\n continue\n ptp_bomber = app.get('ptp_bomber')\n promised_date = app.get('promised_date')\n follow_up_date = app.get('follow_up_date')\n if not promised_date or promised_date.date() < date.today():\n ptp_bomber = promised_date = None\n if not follow_up_date or follow_up_date.date() < date.today():\n follow_up_date = None\n overdue_dict = {'collection_id': app.get('id'), 'external_id':\n app.get('external_id'), 'sub_bill_id': app.get(\n 'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app\n .get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':\n ptp_bomber, 'promised_date': promised_date,\n 'follow_up_date': follow_up_date, 'which_day': which_day,\n 'overdue_days': app.get('overdue_days')}\n bomber_overdue_list.append(overdue_dict)\n try:\n if bomber_overdue_list:\n with db.atomic():\n for index in range(0, len(bomber_overdue_list), 1000):\n insert_list = bomber_overdue_list[index:index + 1000]\n BomberOverdue.insert_many(insert_list).execute()\n except Exception as e:\n logging.info(\n 'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %\n (cycle, str(which_day), str(e)))\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)\ndef today_ptp_auto_call_switch(payload, msg_id):\n today = datetime.today().date()\n next_day = today + timedelta(days=1)\n apps = ApplicationR.select(ApplicationR.latest_bomber).where(\n ApplicationR.promised_date < next_day, ApplicationR.promised_date >=\n today, ApplicationR.promised_date.is_null(False), ApplicationR.\n status != ApplicationStatus.REPAID.value, ApplicationR.cycle <\n Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(\n ApplicationR.latest_bomber)\n bids = [a.latest_bomber_id for a in apps]\n if not bids:\n return\n q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(\n BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids\n ).execute()\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)\ndef update_today_switch_every_day(payload, msg_id):\n q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(\n BomberPtp.auto_ext.is_null(False)).execute()\n\n\n<mask token>\n",
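The dispatch code above slices the shuffled application list with `average_num = get_average_number(len(apps), len(bids_list))`, but the helper itself is masked out in this dump. Below is a minimal, hypothetical sketch of what such an even-split helper could look like, together with the slicing pattern used in `dispatch_apps_to_bomber`; the helper body and the stand-in names (`apps`, `bids_list`) are assumptions for illustration, not the repository's actual implementation.

# Hypothetical sketch of an even-split helper in the spirit of get_average_number().
# Assumption: it returns one chunk size per bomber, summing to the total
# application count, with the remainder spread over the first few bombers
# so nobody receives more than one extra case.
def get_average_number(total_apps, bomber_count):
    if bomber_count <= 0:
        return []
    base, remainder = divmod(total_apps, bomber_count)
    return [base + 1 if i < remainder else base for i in range(bomber_count)]


# Usage mirroring dispatch_apps_to_bomber(): consecutive slices of the
# shuffled application list, one slice per bomber id.
if __name__ == '__main__':
    apps = list(range(10))          # stand-in for application dicts
    bids_list = ['b1', 'b2', 'b3']  # stand-in for bomber ids
    average_num = get_average_number(len(apps), len(bids_list))
    end = 0
    for index, bid in enumerate(bids_list):
        start, end = end, end + average_num[index]
        print(bid, apps[start:end])  # b1 gets 4 apps, b2 and b3 get 3 each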
"step-3": "<mask token>\n\n\n@action(MessageAction.BOMBER_HEALTH_CHECK)\ndef health_check(payload, msg_id):\n pass\n\n\n<mask token>\n\n\ndef dpd1_process(lst):\n \"\"\"已废弃的方法\"\"\"\n if not lst:\n return\n for key, l in lst.items():\n rule = getattr(BeforeInBomber, key).value\n query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)\n ).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(\n rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.\n call_success()))\n success_set = {i.loanid for i in query}\n failed_list = list(set(l) - success_set)\n post_params = {'$and': rule.get('$and'), 'app_list': failed_list}\n resp = Hyperloop().post('/bomber/score/verify', json=post_params)\n if not resp.ok:\n logging.error('hyperloop score verification failed: %s, %s',\n str(resp.status_code), str(resp.text))\n logging.error('hyperloop score verification failed: %s', str(\n post_params))\n continue\n logging.debug('hyperloop score verification success: %s', resp.content)\n resp_json = resp.json()\n app_list = resp_json['data']\n if not app_list:\n continue\n for item in app_list:\n if random.randint(0, 5) == 1:\n send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':\n int(item)})\n\n\n@action(MessageAction.GET_IVR)\ndef get_ivr(payload, msg_id):\n logging.warning('start get_ivr')\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'DPD1-3_INTO_IVR').first()\n now = date.today()\n if sys_config and sys_config.value:\n start = now - timedelta(days=3)\n else:\n start = now\n end = now + timedelta(days=4)\n item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()\n if not item:\n AutoIVR.delete().execute()\n current_page = 0\n elif item.current_page >= item.total_page:\n return\n else:\n current_page = item.current_page\n auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,\n 'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,\n 'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':\n 10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,\n 'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,\n 'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,\n 'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,\n 'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,\n 'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,\n 'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,\n 'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,\n 'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,\n 'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,\n 'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':\n 101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,\n 'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,\n 'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,\n 'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}\n current_page += 1\n with db.atomic() as transaction:\n while True:\n bill_service = BillService()\n ivr_action = bill_service.ivr_pages(page=current_page,\n page_size=500, start_time=utc_datetime(str(start)),\n end_time=utc_datetime(str(end)))\n result = ivr_action['result']\n page_size = int(ivr_action.get('page_size', 0))\n total_page = int(ivr_action.get('total_page', 0))\n insert_args = []\n for a in result:\n due_at = no_utc_datetime(a['due_at'])\n days = (due_at.date() - now).days\n if days == 2:\n continue\n if days > 0:\n time = str(days)\n else:\n time = str(days).replace('-', 
'PDP')\n key = a['app_name'] + str(a['su']) + time\n group = auto_ivr.get(key)\n user_id = a['user_id']\n try:\n user_resp = AccountService().get_user(path_params={\n 'user_id': user_id})\n if str(user_resp['mobile_no']) == str(a['user_mobile_no']):\n numbers = a['user_mobile_no']\n else:\n numbers = a['user_mobile_no'] + ',' + user_resp.get(\n 'mobile_no')\n except:\n logging.error('Request Account Service Error.')\n numbers = a['user_mobile_no']\n insert_args.append({'application_id': a['id'], 'numbers':\n numbers, 'group': group, 'user_id': user_id})\n AutoIVR.insert_many(insert_args).execute()\n if current_page == 1:\n IVRActionLog.create(total_page=total_page, proc_date=now,\n page_size=page_size, current_page=current_page)\n item = IVRActionLog.get(IVRActionLog.proc_date == now)\n else:\n item.current_page = current_page\n item.page_size = page_size\n item.total_page = total_page\n item.save()\n transaction.commit()\n current_page += 1\n if current_page > int(total_page):\n break\n if sys_config and sys_config.value:\n try:\n classfiy_dpd_ptp_apps()\n except Exception as e:\n logging.error('dpd1-3_test_error:%s' % str(e))\n\n\ndef ivr_t2_test():\n t2_groups = [39, 40, 41, 42, 43, 44]\n ivr_test_proportion = 0.2\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'IVR_TEST_PROPORTION').first()\n if sys_config and sys_config.value:\n ivr_test_proportion = float(sys_config.value)\n t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.\n status == AutoIVRStatus.AVAILABLE.value)\n t2_dict = defaultdict(list)\n for ivr in t2_ivrs:\n t2_dict[ivr.group].append(ivr.id)\n test_ivr_ids = []\n for group, ivr_ids in t2_dict.items():\n number = ceil(len(ivr_ids) * ivr_test_proportion)\n test_ivr_ids += ivr_ids[:number]\n if not test_ivr_ids:\n return\n q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.\n group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.APP_MERGE)\n@deprecated(version='1.0', reason='This function will be removed soon')\ndef app_merge(payload, msg_id):\n sql = \"\"\"\n select *\n from (\n select a.id as id\n from dashboard.application as a\n inner join repayment.bill2 as b on b.external_id = a.id\n where not exists (\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.app = 'DanaCepat'\n and a.is_first_loan = 1\n and a.apply_at < '2018-08-23 20:50:00'\n and b.overdue_days between 1 and 3\n and b.status != 2) result\n where not exists (\n select 1\n from bomber.application as a\n where a.cycle = 1\n and a.status = 4\n and a.id = result.id\n )\n \"\"\"\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n new_data = cursor.fetchall()\n cursor.close()\n if new_data:\n bomber = [103, 104]\n for d in new_data:\n app_id = {'id': d[0]}\n application_overdue(app_id, None)\n Application.update(status=ApplicationStatus.AB_TEST.value,\n latest_bomber=random.choice(bomber), ptp_bomber=None).where(\n Application.id == d[0]).execute()\n logging.warning('add new app success')\n ptp = date.today() - timedelta(days=1)\n del_sql = (\n \"\"\"\n select a.id\n from bomber.application as a\n where exists(\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.cycle = 1\n and a.status = 4\n and (a.promised_date is null or a.promised_date < \"%s\")\n \"\"\"\n % ptp)\n cursor = readonly_db.get_cursor()\n cursor.execute(del_sql)\n del_date = 
cursor.fetchall()\n cursor.close()\n if del_date:\n return\n ids = list()\n for d in del_date:\n ids.append(d[0])\n Application.update(status=ApplicationStatus.UNCLAIMED.value,\n latest_bomber=None).where(Application.id << ids).execute()\n\n\n@action(MessageAction.APPLICATION_BOMBER)\ndef application_overdue(payload, msg_id):\n application_id = payload['id']\n sub_bill_id = payload['bill_sub_id']\n local_app = Application.select().where(Application.external_id ==\n application_id).order_by(Application.finished_at).first()\n if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:\n logging.info('application %s overdue, already exists', application_id)\n add_contact(local_app)\n return\n if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:\n overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==\n sub_bill_id, OverdueBillR.external_id == application_id)\n if overdue_bill.exists():\n logging.info(\n 'application %s,sub_bill_id %s overdue, already exists' % (\n application_id, sub_bill_id))\n return\n try:\n sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])\n sub_bill = sub_bill[0]\n except Exception:\n logging.error(\n 'application %s overdue, get sub_bill info failed:Request To repayment Error'\n , application_id)\n return\n if sub_bill['status'] == 2:\n logging.error('application %s overdue, but bills already cleared',\n application_id)\n return\n overdue_days = sub_bill.get('overdue_days', 0)\n if overdue_days == 0:\n logging.info('application {} no overdue'.format(str(application_id)))\n return\n gold_eye = GoldenEye().get('/applications/%s' % application_id)\n if not gold_eye.ok:\n logging.error('get application %s failed: Request to GoldenEye.',\n application_id)\n return\n gold_app = gold_eye.json().get('data')\n user_id = gold_app['user_id']\n apply_history = Dashboard().get('/users/%s/apply-history' % user_id)\n if not apply_history.ok:\n logging.error(\n 'get user %s apply history failed: Request to Dashboard Failed.',\n user_id)\n return\n history = apply_history.json().get('data')\n loan_success_times = len([(1) for i in history if i['status'] in [80, \n 90, 100, 70] and i['id'] != gold_app['id']])\n id = application_id\n type = ApplicationType.CASH_LOAN.value\n bill_id = sub_bill.get('bill_id')\n amount = sub_bill.get('amount')\n amount_net = sub_bill.get('amount_net')\n interest_rate = sub_bill.get('interest_rate')\n overdue_days = sub_bill.get('overdue_days')\n origin_due_at = sub_bill.get('origin_due_at')\n sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,\n 'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),\n 'overdue_days': overdue_days, 'origin_due_at': origin_due_at,\n 'amount': amount, 'amount_net': amount_net, 'interest_rate':\n interest_rate, 'external_id': application_id}\n if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:\n if local_app and local_app.status != ApplicationStatus.REPAID.value:\n sub_overdue_bill['collection_id'] = local_app.id\n local_app.amount += amount\n local_app.amount_net += amount_net\n local_app.save()\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('application %s,sub_bill_id:%s overdue created' %\n (application_id, sub_bill_id))\n return\n else:\n id = idg()\n type = ApplicationType.CASH_LOAN_STAGING.value\n sub_overdue_bill['collection_id'] = id\n ptp_info = BombingHistory.filter(BombingHistory.application == id).first()\n promised_amount = ptp_info and ptp_info.promised_amount\n promised_date = ptp_info 
and ptp_info.promised_date\n application = Application.create(id=id, user_id=gold_app['user_id'],\n user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[\n 'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],\n contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(\n 'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=\n birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(\n gold_app.get('id_ektp')), profile_province=(gold_app.get(\n 'profile_province') or {}).get('name'), profile_city=(gold_app.get(\n 'profile_city') or {}).get('name'), profile_district=(gold_app.get(\n 'profile_district') or {}).get('name'), profile_residence_time=\n gold_app.get('profile_residence_time'), profile_residence_type=\n gold_app.get('profile_residence_type'), profile_address=gold_app.\n get('profile_address'), profile_education=gold_app.get(\n 'profile_education'), profile_college=(gold_app.get(\n 'profile_college') or {}).get('name'), job_name=gold_app.get(\n 'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get\n ('job_bpjs'), job_user_email=gold_app.get('job_user_email'),\n job_type=gold_app.get('job_type'), job_industry=gold_app.get(\n 'job_industry'), job_department=gold_app.get('job_department'),\n job_province=(gold_app.get('job_province') or {}).get('name'),\n job_city=(gold_app.get('job_city') or {}).get('name'), job_district\n =(gold_app.get('job_district') or {}).get('name'), job_address=\n gold_app.get('job_address'), amount=amount, amount_net=amount_net,\n interest_rate=interest_rate, term=gold_app.get('term'),\n origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=\n sub_bill.get('repay_at'), loan_success_times=loan_success_times,\n arrived_at=datetime.now(), follow_up_date=datetime.now(),\n promised_amount=promised_amount, promised_date=promised_date,\n external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=\n datetime.now())\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('overdue application %s created', application_id)\n Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,\n status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)\n add_contact(application)\n\n\ndef add_contact(application):\n logging.info('start add contact for application: %s', application.id)\n contacts = Contact.filter(Contact.user_id == application.user_id)\n existing_numbers = {contact.number for contact in contacts}\n insert_contacts = list()\n mon_insert_contact = {}\n user_mobile_no = number_strip(application.user_mobile_no)\n if user_mobile_no and user_mobile_no not in existing_numbers:\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': user_mobile_no, 'relationship':\n Relationship.APPLICANT.value, 'source': 'apply info',\n 'real_relationship': Relationship.APPLICANT.value})\n existing_numbers.add(number_strip(application.user_mobile_no))\n extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id\n )\n if not extra_phone.ok:\n extra_phone = []\n logging.error('get user %s extra contacts failed', application.user_id)\n else:\n extra_phone = extra_phone.json()['data']\n if extra_phone:\n for i in extra_phone:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': number, 'relationship':\n Relationship.APPLICANT.value, 'source': 'extra phone',\n 
'real_relationship': Relationship.APPLICANT.value})\n key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n ec_contact = []\n contact = json.loads(application.contact or '[]')\n for i in contact:\n if number_strip(i['mobile_no']\n ) not in existing_numbers and number_strip(i['mobile_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['mobile_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['mobile_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['mobile_no']))\n if i['type'] != 1:\n continue\n if number_strip(i['tel_no']) not in existing_numbers and number_strip(i\n ['tel_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['tel_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['tel_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['tel_no']))\n if ec_contact:\n Contact.insert_many(ec_contact).execute()\n if all((application.job_tel, number_strip(application.job_tel), \n number_strip(application.job_tel) not in existing_numbers)):\n insert_contacts.append({'user_id': application.user_id, 'name':\n None, 'number': number_strip(application.job_tel),\n 'relationship': Relationship.COMPANY.value, 'source':\n 'basic info job_tel', 'real_relationship': Relationship.COMPANY\n .value})\n key = user_mobile_no, number_strip(application.job_tel\n ), ContactType.C_BASIC_INFO_JOB_TEL.value\n mon_insert_contact[key] = 1, 0, None\n existing_numbers.add(number_strip(application.job_tel))\n sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %\n application.external_id)\n if not sms_contacts.ok:\n sms_contacts = []\n logging.info('get user %s sms contacts failed', application.external_id\n )\n else:\n sms_contacts = sms_contacts.json()['data']\n if sms_contacts:\n for i in sms_contacts:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.SUGGESTED.value, 'source': 'sms contacts',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value\n mon_insert_contact[key] = 1, 0, i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n cf = GoldenEye().get('/applications/%s/call/frequency' % application.\n external_id)\n if not cf.ok:\n call_frequency = []\n logging.error('get application %s call frequency error',\n application.external_id)\n else:\n call_frequency = cf.json()['data']\n insert_contacts = []\n fm = GoldenEye().get('/applications/%s/contact/family-member' %\n application.external_id)\n if not fm.ok:\n family = []\n logging.error('get application %s family-member info error',\n application.external_id)\n else:\n family = fm.json()['data']\n if family:\n for i in family:\n if not i.get('number'):\n 
logging.info('family member %s' % str(i))\n continue\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n logging.info('family members: %s' % str(i))\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.FAMILY.value, 'source': FamilyContactType.\n CALLEC.value, 'total_count': i.get('total_count', 1),\n 'total_duration': i.get('total_duration', 0),\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number, ContactType.F_CALL_EC.value\n mon_insert_contact[key] = i.get('total_count', 1), i.get(\n 'total_duration', 0), i['name'][:128]\n existing_numbers.add(number)\n mon_update_contact = {}\n if call_frequency:\n with db.atomic():\n count = 1\n for i in call_frequency:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n Contact.update(total_count=i['total_count'],\n total_duration=i['total_duration']).where(Contact.\n number == number, Contact.user_id == application.\n user_id)\n key = user_mobile_no, number\n mon_update_contact[key] = i['total_count'], i[\n 'total_duration']\n continue\n if count < 6:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.FAMILY.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': FamilyContactType.\n CALLTOP5.value, 'real_relationship': Relationship.\n FAMILY.value})\n count += 1\n key = user_mobile_no, number, ContactType.F_CALL_TOP5.value\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n else:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.SUGGESTED.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': 'call frequency',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = (user_mobile_no, number, ContactType.\n S_CALL_FREQUENCY.value)\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n next_apply_list = AccountService().add_contact(application.user_id)\n for next_apply in next_apply_list:\n number = number_strip(str(next_apply))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n SUGGESTED.value, source='online profile phone',\n real_relationship=Relationship.SUGGESTED.value)\n key = (user_mobile_no, number, ContactType.\n S_ONLINE_PROFILE_PHONE.value)\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %\n application.user_id)\n if not next_applicant.ok:\n next_applicant = []\n logging.error('get user %s dual_contact contacts failed' %\n application.user_id)\n else:\n next_applicant = next_applicant.json()['data']\n if next_applicant:\n for i in next_applicant:\n number = number_strip(str(i))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=\n application.user_name, number=number, relationship=\n Relationship.APPLICANT.value, source='apply info',\n real_relationship=Relationship.APPLICANT.value)\n key = user_mobile_no, number, 
ContactType.A_APPLY_INFO.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n numbers = []\n try:\n numbers = AccountService().ktp_number(path_params={'user_id':\n application.user_id})\n except Exception as e:\n logging.info('request ktp numbers failed %s' % str(e))\n for n in numbers:\n number = number_strip(str(n))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n APPLICANT.value, source='ktp number', real_relationship=\n Relationship.APPLICANT.value)\n key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n try:\n ecs = GoldenEye().get('/applications/%s/contact/ec' % application.\n external_id)\n except Exception as e:\n logging.info('request ec-member error: %s' % str(e))\n try:\n if not ecs.ok:\n ec = []\n logging.info('get application %s ec-member info error',\n application.external_id)\n else:\n ec = ecs.json()['data']\n if ec:\n for e in ec:\n number = number_strip(e['numbers'])[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=e[\n 'name'][:128], number=number, relationship=\n Relationship.FAMILY.value, source=FamilyContactType\n .CONTACTEC.value, real_relationship=Relationship.\n FAMILY.value)\n key = (user_mobile_no, number, ContactType.F_CONTACT_EC\n .value)\n mon_insert_contact[key] = 1, 0, e['name'][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add ec_member error:%s' % str(e))\n try:\n mn = GoldenEye().get('/applications/%s/contact/my_number' %\n application.external_id)\n except Exception as e:\n logging.info('request my_number error: %s' % str(e))\n try:\n if not mn.ok:\n my = []\n logging.info('get application %s my_number info error',\n application.external_id)\n else:\n my = mn.json()['data']\n if my:\n for m in my:\n number = number_strip(m)[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=my[m][\n :128], number=number, relationship=Relationship.\n SUGGESTED.value, source='my number',\n real_relationship=Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_MY_NUMBER.value\n mon_insert_contact[key] = 1, 0, my[m][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add my_member error:%s' % str(e))\n try:\n cn = GoldenEye().get('/applications/%s/contact/company-number' %\n application.external_id)\n except Exception as e:\n logging.info('request company-number error: %s' % str(e))\n try:\n if not cn.ok:\n cn = []\n logging.info('get application %s company_number info error',\n application.external_id)\n else:\n cn = cn.json()['data']\n if cn:\n for c in cn:\n number = c\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=cn[c][\n :128], number=number, relationship=Relationship.\n COMPANY.value, source='company', real_relationship=\n Relationship.COMPANY.value)\n key = user_mobile_no, number, ContactType.C_COMPANY.value\n mon_insert_contact[key] = 1, 0, cn[c][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add company_member error:%s' % 
str(e))\n try:\n ol = AccountService().other_login_contact(userId=application.user_id)\n except Exception as e:\n logging.error('request other_login error: %s' % e)\n ol = {}\n try:\n for o in ol:\n number = number_strip(o)\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=ol[o][:128\n ], number=number, relationship=Relationship.SUGGESTED.\n value, source='other_login', real_relationship=\n Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value\n mon_insert_contact[key] = 1, 0, ol[o][:128]\n except Exception as e:\n logging.error('add other_login number error:%s' % e)\n logging.info('add contact for application %s finished', application.id)\n if mon_insert_contact or mon_update_contact:\n send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {\n 'user_mobile_no': user_mobile_no, 'insert_contact': str(\n mon_insert_contact), 'update_contact': str(mon_update_contact),\n 'user_id': application.user_id, 'name': application.user_name})\n\n\n<mask token>\n\n\ndef get_contact_from_mongo(number):\n if not number:\n return []\n query = TotalContact.objects(src_number=number, source__in=TotalContact\n .available()).order_by('source')\n lst = []\n for c in query:\n relation = TotalContact.relationship(c.source)\n if relation == -1:\n continue\n source = TotalContact.str_source(c.source)\n if not source:\n continue\n lst.append({'related_number': c.dest_number, 'source': source,\n 'is_calc': c.is_calc, 'total_count': c.total_count,\n 'total_duration': c.total_duration, 'relation': relation,\n 'name': c.dest_name})\n return lst\n\n\n<mask token>\n\n\ndef check_key_not_none(payload, keys):\n for key in keys:\n if payload.get(key) is None:\n logging.error('Missing args {}'.format(str(key)))\n return False\n return True\n\n\n@action(MessageAction.BILL_PAID)\ndef bill_paid(payload, msg_id):\n validate = check_key_not_none(payload, ['external_id', 'late_fee_part',\n 'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])\n if not validate:\n logging.error('payload key not fully pass in.')\n return\n external_id = payload['external_id']\n late_fee_part = Decimal(payload['late_fee_part'])\n principal_part = Decimal(payload['principal_part'])\n paid_at = payload['paid_at']\n partner_bill_id = payload['partner_bill_id']\n logging.debug(\n 'application %s paid principal part %s, paid late fee part %s',\n external_id, principal_part, late_fee_part)\n application = Application.filter(Application.external_id == external_id\n ).order_by(-Application.created_at).first()\n if not application:\n logging.info('application %s paid, not found application', external_id)\n return\n sub_bill_id = payload['bill_sub_id']\n overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==\n application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()\n if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not\n overdue_bill):\n logging.info('bill sub not in bomber %s', sub_bill_id)\n return\n with db.atomic():\n repay_at = str_no_utc_datetime(payload['latest_repay_at'])\n Application.update(repay_at=repay_at).where(Application.id ==\n application.id).execute()\n RepaymentLog.create(application=application.id, is_bombed=True,\n current_bomber=application.latest_bomber_id, cycle=application.\n cycle, principal_part=principal_part, late_fee_part=\n late_fee_part, repay_at=paid_at, ptp_bomber=application.\n ptp_bomber, latest_call=application.latest_call, periods=\n overdue_bill.periods if 
overdue_bill else None, overdue_bill_id\n =overdue_bill.id if overdue_bill else None, partner_bill_id=\n partner_bill_id)\n phone_status = PhoneStatus.CONNECTED.value\n real_relationship = RealRelationship.user_values()\n commit = CallActionCommit.NO.value\n number = CallActions.select(CallActions.number).where(CallActions.\n phone_status == phone_status, CallActions.real_relationship <<\n real_relationship, CallActions.commit == commit, CallActions.\n application == application.id).order_by(-CallActions.created_at\n ).first()\n if number:\n Contact.update(call_priority=PriorityStatus.REPAY.value).where(\n Contact.user_id == application.user_id, Contact.\n call_priority == PriorityStatus.LAST.value).execute()\n Contact.update(call_priority=PriorityStatus.LAST.value).where(\n Contact.user_id == application.user_id, Contact.number ==\n number.number).execute()\n if not application.latest_bomber_id:\n return\n Inbox.create(title='application %s,sub_bill_id %s repaid' % (\n application.external_id, sub_bill_id), content=\n 'application %s,sub_bill_id %s repaid' % (application.\n external_id, sub_bill_id), receiver=application.\n latest_bomber_id or application.last_bomber_id, category=\n InboxCategory.REPAID.value)\n\n\n@action(MessageAction.BILL_RELIEF)\ndef bill_relief(payload, msg_id):\n \"\"\"已废弃\"\"\"\n bill = payload['head_bill']\n repay_at = str_no_utc_datetime(bill['latest_repay_at'])\n updated_row = Application.update(repay_at=repay_at).where(Application.\n id == bill['external_id']).execute()\n logging.info('application %s bill relief done', bill['external_id'])\n return updated_row\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)\ndef calc_overdue_days_over(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query = Application.update(overdue_days=overdue_days).where(Application\n .status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.\n overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)\n updated_rows_count = query.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count)\n try:\n calc_overdue_days_over_instalment()\n except Exception as e:\n logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))\n apps = Application.filter(Application.status << [ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value], Application.overdue_days > 95, \n Application.promised_date.is_null(True) | (fn.DATE(Application.\n promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {\n 'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n\n\ndef calc_overdue_days_over_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n for status in sub_bill_status_list:\n query = OverdueBill.update(overdue_days=overdue_days).where(\n OverdueBill.status == status, OverdueBill.overdue_days > 95)\n 
updated_rows_count = query.execute()\n logging.info(\n 'calc_overdue_days_over_instalment done,count:%s,status:%s' % (\n updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.collection_id,\n OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on\n =OverdueBill.collection_id == Application.id).where(Application\n .status == status, Application.type == ApplicationType.\n CASH_LOAN_STAGING.value)\n app_update = {}\n for ob in overdue_bills:\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)\ndef calc_overdue_days(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query_unclaimed = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.UNCLAIMED.value, \n Application.overdue_days <= 95, Application.type == ApplicationType\n .CASH_LOAN.value)\n updated_rows_count_unclaimed = query_unclaimed.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_unclaimed)\n query_processing = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.PROCESSING.value, \n Application.overdue_days <= 95, Application.type == ApplicationType\n .CASH_LOAN.value)\n updated_rows_count_processing = query_processing.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_processing)\n query_test = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.AB_TEST.value, Application.\n overdue_days <= 95, Application.type == ApplicationType.CASH_LOAN.value\n )\n updated_rows_count_test = query_test.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_test)\n calc_overdue_days_instalment()\n apps = Application.select(Application.id).where(Application.status << [\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value, ApplicationStatus.AB_TEST.value], Application.overdue_days <=\n 95, Application.promised_date.is_null(True) | (fn.DATE(Application.\n promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {\n 'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n Application.update(C1A_entry=datetime.now()).where(Application.status <<\n [ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value, ApplicationStatus.AB_TEST.value], Application.overdue_days == 4\n ).execute()\n\n\ndef calc_overdue_days_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n today_now_time = datetime.now()\n month_first_day = today_now_time.replace(day=1, hour=1, minute=30,\n second=0, 
microsecond=0)\n for status in sub_bill_status_list:\n query = OverdueBill.update(overdue_days=overdue_days).where(\n OverdueBill.status == status, OverdueBill.overdue_days <= 95)\n updated_rows_count = query.execute()\n logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %\n (updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.\n created_at, OverdueBill.collection_id, OverdueBill.overdue_days\n ).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.\n collection_id == Application.id).where(Application.status ==\n status, Application.type == ApplicationType.CASH_LOAN_STAGING.value\n )\n app_update = {}\n for ob in overdue_bills:\n if (ob.status == ApplicationStatus.REPAID.value and ob.\n created_at < month_first_day):\n continue\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)\ndef automatic_escalation(payload, msg_id):\n app_ids = payload.get('application_list', [])\n if not app_ids:\n return\n apps = Application.select().where(Application.id.in_(app_ids), \n Application.status != ApplicationStatus.REPAID.value)\n for a in apps:\n new_cycle = application_entry_different_calculations(a)\n if a.overdue_days < 90:\n logging.info(\n 'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'\n .format(a.id, new_cycle, a.cycle, a.overdue_days))\n if new_cycle > a.cycle:\n with db.atomic():\n if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle\n .C1B.value):\n bomber_id = (a.latest_bomber_id if a.latest_bomber_id else\n a.cycle)\n DispatchAppHistory.update(out_at=datetime.now(),\n out_overdue_days=a.overdue_days).where(\n DispatchAppHistory.application == a.id, \n DispatchAppHistory.bomber_id == bomber_id).execute()\n Escalation.create(application=a.id, type=EscalationType.\n AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,\n current_cycle=a.cycle, escalate_to=new_cycle,\n current_bomber_id=a.latest_bomber)\n dis_app_update = DispatchApp.update(status=DisAppStatus.\n ABNORMAL.value).where(DispatchApp.application == a.id)\n dis_app_update.execute()\n a.cycle = new_cycle\n a.last_bomber = a.latest_bomber\n a.status = ApplicationStatus.UNCLAIMED.value\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n if new_cycle == Cycle.C1B.value:\n a.C1B_entry = datetime.now()\n elif new_cycle == Cycle.C2.value:\n a.C2_entry = datetime.now()\n elif new_cycle == Cycle.C3.value:\n a.C3_entry = datetime.now()\n a.save()\n logging.info('automatic escalation done')\n\n\ndef application_entry_different_calculations(app):\n conf = {(1): [1, 10], (2): [11, 30], (3): [31, 60], (4): [61, 90], (5):\n [91, 999999]}\n for new_cycle, scopes in conf.items():\n if scopes[0] <= app.overdue_days <= scopes[1]:\n return new_cycle\n return app.cycle\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY)\ndef cron_summary(payload, msg_id):\n \"\"\"已废弃\"\"\"\n employees = Bomber.select(Bomber, Role).join(Role)\n summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,\n 'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,\n 'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':\n 0, 
'sms_sent': 0} for i in employees}\n now_date = date.today()\n cal_date = now_date - timedelta(days=1)\n claimed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('claimed')).where(fn.DATE(Application.\n claimed_at) == cal_date, Application.status << [ApplicationStatus.\n PROCESSING.value, ApplicationStatus.REPAID.value], Application.\n latest_bomber.is_null(False)).group_by(Application.latest_bomber)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False)).group_by(\n Application.latest_bomber)\n completed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('completed')).where(Application.\n latest_bombing_time.is_null(False), fn.DATE(Application.\n latest_bombing_time) == cal_date, Application.latest_bomber.is_null\n (False)).group_by(Application.latest_bomber)\n escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(\n Escalation.id).alias('escalated')).where(fn.DATE(Escalation.\n created_at) == cal_date, Escalation.type == EscalationType.\n AUTOMATIC.value, Escalation.current_bomber.is_null(False), \n Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation\n .current_bomber)\n transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).\n alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==\n cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(\n Transfer.operator)\n promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(\n BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.\n created_at) == cal_date, BombingHistory.result == BombingResult.\n HAS_PROGRESS.value).group_by(BombingHistory.bomber)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,\n RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.\n current_bomber)\n calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id\n ).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==\n cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)\n calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.\n record_id).alias('calls_connected')).where(fn.DATE(CallLog.\n time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==\n '1').group_by(CallLog.user_id)\n sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(\n ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_\n (ConnectType.sms()), ConnectHistory.created_at >= cal_date, \n ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)\n for i in claimed:\n summary[i.latest_bomber_id]['claimed'] += i.claimed\n for i in completed:\n summary[i.latest_bomber_id]['completed'] += i.completed\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n for i in escalated:\n summary[i.current_bomber_id]['escalated'] += i.escalated\n for i in transferred:\n summary[i.operator_id]['transferred'] += i.transferred\n for i in promised:\n summary[i.bomber_id]['promised'] += i.promised\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n 
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in calls_made:\n summary[int(i.user_id)]['calls_made'] += i.calls_made\n for i in calls_connected:\n summary[int(i.user_id)]['calls_connected'] += i.calls_connected\n for i in sms_sent:\n summary[i.operator_id]['sms_sent'] += i.sms_sent\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'claimed': data['claimed'], 'completed': data['completed'],\n 'cleared': data['cleared'], 'escalated': data['escalated'],\n 'transferred': data['transferred'], 'promised': data['promised'\n ], 'amount_recovered': data['amount_recovered'], 'calls_made':\n data['calls_made'], 'calls_connected': data['calls_connected'],\n 'sms_sent': data['sms_sent'], 'date': cal_date})\n if insert_args:\n Summary.insert_many(insert_args).execute()\n cycle_args = []\n escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(\n Escalation.id).alias('escalated_in')).where(Escalation.status ==\n ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==\n cal_date).group_by(Escalation.escalate_to)\n for i in escalated_in:\n cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.\n escalated_in, 'date': cal_date})\n amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM\n (RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)\n for i in amount_recovered_total:\n amount_recovered_total = i.principal_part + i.late_fee_part\n cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':\n amount_recovered_total, 'date': cal_date})\n if cycle_args:\n Summary.insert_many(cycle_args).execute()\n logging.info('cal summary done')\n send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY2)\ndef cron_summary2(payload, msg_id):\n \"\"\"已废弃,定时任务还在执行,具体情况待确定\"\"\"\n cal_date = date.today() - timedelta(days=1)\n employees = Bomber.select(Bomber, Role).join(Role)\n auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,\n AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')\n ).where(fn.DATE(AutoCallActions.created_at) == cal_date)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.\n is_null(False), RepaymentLog.is_bombed == True)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False))\n auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,\n AutoCallActions.result)\n amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)\n cleared = cleared.group_by(Application.latest_bomber)\n summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,\n 'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered': \n 0} for e in employees}\n for a in auto_call_actions:\n summary[a.bomber_id]['answered_calls'] += a.count\n if a.result == AutoCallResult.PTP.value:\n summary[a.bomber_id]['ptp'] += a.count\n if a.result == AutoCallResult.FOLLOW_UP.value:\n 
summary[a.bomber_id]['follow_up'] += a.count\n if a.result == AutoCallResult.NOT_USEFUL.value:\n summary[a.bomber_id]['not_useful'] += a.count\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'answered_calls': data['answered_calls'], 'ptp': data['ptp'],\n 'follow_up': data['follow_up'], 'not_useful': data['not_useful'\n ], 'cleared': data['cleared'], 'amount_recovered': str(data[\n 'amount_recovered']), 'date': cal_date})\n if insert_args:\n Summary2.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_DISCOUNT_APPROVED)\ndef bomber_discount_approved(payload, msg_id):\n app_id = payload['id']\n msg_type = payload['msg_type']\n discount_to = payload['discount_to']\n effective_to = payload['effective_to']\n application = Application.filter(Application.id == app_id).first()\n if not application:\n logging.error(\n 'discount approved msg send failed application %s not found',\n app_id)\n return\n template = Template.select(Template.text, Template.app).where(Template.\n type == ConnectType.AUTO_SMS.value, Template.id << Template.\n get_auto_sms_tpl(msg_type), Template.app == application.app).first()\n if not template:\n logging.error('discount approved msg send failed template %s not found'\n , msg_type)\n return\n promised_date = None\n if application.promised_date:\n promised_date = application.promised_date.strftime('%d-%m-%Y')\n tpl_data = {'user_name': application.user_name, 'due_days': application\n .overdue_days, 'app_name': application.app, 'phone': application.\n user_mobile_no, 'cs_number': cs_number_conf.get(application.app,\n '02150202889'), 'promised_date': promised_date, 'discount_to':\n discount_to, 'effective_to': effective_to}\n content = template.text.format(**tpl_data)\n data_list = [{'receiver': '62' + application.user_mobile_no, 'content':\n content, 'title': ''}]\n send_sms(data_list, msg_type, application.app)\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST)\ndef bomber_auto_call_list(payload, msg_id):\n with db.atomic():\n bomber_dispatch_app()\n dispatch_instalment_app()\n dis_apps = DispatchApp.select(DispatchApp.application).where(\n DispatchApp.status == DisAppStatus.NORMAL.value)\n c1_apps = Application.select(Application.id, Application.cycle,\n Application.follow_up_date, Application.called_times).where(Application\n .status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.\n AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.\n is_rejected == False, Application.promised_date.is_null(True) | (fn\n .DATE(Application.promised_date) < datetime.today().date())).order_by(\n Application.overdue_days, Application.apply_at)\n dis_apps_ids = [da.application_id for da in dis_apps]\n insert_args = []\n for a in c1_apps:\n if a.id in dis_apps_ids:\n continue\n insert_args.append({'application': a.id, 'cycle': a.cycle,\n 'follow_up_date': a.follow_up_date, 'called_times': 1 if a.\n called_times else 0, 'description': 'init'})\n if not insert_args:\n logging.error('no application need auto call')\n with db.atomic():\n AutoCallList.delete().execute()\n for idx in range(0, len(insert_args), 100):\n AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()\n for idx in range(0, 
len(insert_args), 100):\n application_list = [i['application'] for i in insert_args[idx:idx +\n 100]]\n send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {\n 'application_list': application_list})\n logging.info('bomber generate auto call list finished')\n send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})\n\n\nclass ChangeBomberTool(object):\n\n @staticmethod\n def in_record(bomber_id, ids, bd):\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.\n alias('application_id'), R(str(bomber_id)).alias('bomber_id'),\n fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(\n 'expected_out_time'), Application.overdue_days.alias(\n 'entry_overdue_days')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << ids)\n Application.update(latest_bomber=bomber_id).where(Application.id.\n in_(ids)).execute()\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bd),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n @staticmethod\n def out_record(a, bd):\n _id = str(a.id)\n DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a\n .overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]\n .get('principal_paid')), out_late_fee_pending=bd[_id].get(\n 'late_fee') - bd[_id].get('late_fee_paid')).where(\n DispatchAppHistory.application == a.id, DispatchAppHistory.\n bomber_id == a.latest_bomber_id).execute()\n a.last_bomber = a.latest_bomber\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n a.save()\n\n @staticmethod\n def classify(l, b):\n if len(l) == 1:\n return l[0]\n _l = filter(lambda x: x['bomber'] != b, l)\n return min(_l, key=lambda x: len(x['ids']))\n\n\n<mask token>\n\n\ndef bomber_dispatch_app():\n try:\n c1a_dispatch_app()\n except Exception as e:\n logging.error('c1a_dispatch_app error:%s' % str(e))\n cycle = {(1): 10, (2): 30, (3): 60, (4): 90}\n apps = Application.select().where(fn.DATE(Application.C2_entry) == date\n .today(), Application.type == ApplicationType.CASH_LOAN.value)\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C2.value)\n apps_ids = [a.id for a in apps]\n dispatch_inserts = []\n start_index = 0\n apps_length = len(apps_ids)\n logging.warning('apps length %s' % str(apps_length))\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)\n gen = CycleIter([b.id for b in bombers])\n existing_list = []\n end_index = start_index + int(apps_length * p.app_percentage)\n logging.info('partner length %s' % str(end_index))\n if not apps_ids[start_index:end_index]:\n continue\n bills = BillService().bill_list(application_ids=apps_ids[\n start_index:end_index])\n bill_dict = {bill['application_id']: bill for bill in bills}\n for a_id in apps_ids[start_index:end_index]:\n bomber = average_gen(gen, existing_list)\n q = DispatchApp.delete().where(DispatchApp.application == a_id\n ).execute()\n dispatch_inserts.append({'application': a_id, 'bomber': bomber,\n 'partner': p.id})\n application = Application.select().where(Application.id == a_id\n ).first()\n application.latest_bomber = bomber\n application.status = ApplicationStatus.AB_TEST.value\n 
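# NOTE: the DispatchAppHistory row created a few statements below derives expected_out_time from the cycle map defined at the top of bomber_dispatch_app ({1: 10, 2: 30, 3: 60, 4: 90}), which appears to hold the last overdue day of each cycle:\n# day_next_cycle = cycle_days - overdue_days, so an application 35 days overdue in a cycle whose threshold is 60 is expected to leave this queue in 60 - 35 = 25 days.\n# entry_principal_pending and entry_late_fee_pending come from the BillService snapshot (bill_dict) fetched above for this partner's slice of application ids.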
application.ptp_bomber = None\n application.save()\n day_next_cycle = cycle.get(application.cycle\n ) - application.overdue_days\n DispatchAppHistory.create(application=a_id, partner_id=p.id,\n bomber_id=bomber, entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - Decimal(\n bill_dict[a_id].get('principal_paid')),\n entry_late_fee_pending=Decimal(bill_dict[a_id].get(\n 'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')\n ), expected_out_time=date.today() + timedelta(days=\n day_next_cycle))\n start_index = end_index\n with db.atomic():\n for idx in range(0, len(dispatch_inserts), 100):\n DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n config = SystemConfig.prefetch(SCI.AB_TEST_C2)\n c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)\n c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)\n c2 = apps_ids[start_index:]\n if c2:\n bills = BillService().bill_list(application_ids=c2)\n else:\n bills = []\n bill_dict = {bill['application_id']: bill for bill in bills}\n logging.info('c2 AB_test length: %s' % str(c2))\n gen = CycleIter(c2_bomber)\n existing_list = []\n for c in c2:\n bomber = average_gen(gen, existing_list)\n application = Application.filter(Application.id == c).first()\n application.status = ApplicationStatus.AB_TEST.value\n application.latest_bomber = bomber\n application.ptp_bomber = None\n application.save()\n day_next_cycle = 46 - application.overdue_days\n DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at\n =datetime.now(), entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - bill_dict[c].get(\n 'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(\n 'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),\n expected_out_time=date.today() + timedelta(days=day_next_cycle))\n ab_test_other()\n\n\ndef c1a_dispatch_app():\n today = datetime.today().date()\n tomorrow = today + timedelta(days=1)\n c1a_apps = Application.select().where(Application.status << [\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value], Application.dpd1_entry >= today, Application.dpd1_entry <\n tomorrow, Application.type == ApplicationType.CASH_LOAN.value)\n all_aids = [a.id for a in c1a_apps]\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C1A.value)\n end = 0\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n is_del == 0)\n start = end\n end += int(len(all_aids) * p.app_percentage)\n aids = all_aids[start:end]\n bids = [b.id for b in bombers]\n if not bids or not aids:\n continue\n average_number = get_average_number(len(aids), len(bids))\n p_end = 0\n for i, bid in enumerate(bids):\n p_start = p_end\n p_end += average_number[i]\n b_aids = aids[p_start:p_end]\n with db.atomic():\n q = Application.update(latest_bomber=bid, status=\n ApplicationStatus.AB_TEST.value).where(Application.id <<\n b_aids).execute()\n params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,\n 'application_ids': b_aids, 'dest_bomber_id': bid}\n new_in_record(**params)\n try:\n dispatch_inserts = []\n for aid in b_aids:\n dispatch_inserts.append({'application': aid, 'bomber':\n bid, 'partner': p.id, 'status': DisAppStatus.NORMAL\n .value})\n if dispatch_inserts:\n q = DispatchApp.insert_many(dispatch_inserts).execute()\n except Exception as e:\n logging.error('c1a分件写入dispatch_app error:%s' % str(e))\n\n\n<mask token>\n\n\ndef 
get_cash_bomber(bids, cycle):\n cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==\n 0, Bomber.instalment != cycle)\n cash_bids = [b.id for b in cash_bombers]\n return cash_bids\n\n\n<mask token>\n\n\ndef out_and_in_record_instalment(**kwargs):\n if not kwargs.get('application_ids'):\n return\n out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(\n DispatchAppHistory.application << kwargs['application_ids'],\n DispatchAppHistory.out_at.is_null(True)).execute()\n cycle_period = {(1): '10', (2): '30', (3): '60', (4): '90'}\n period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')\n kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(kwargs['dest_bomber_id'])).alias(\n 'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.\n alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).\n alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << kwargs[\n 'application_ids'])\n application_list = list(subquery)\n for idx in range(0, len(application_list), 50):\n applications = application_list[idx:idx + 50]\n app_ids = [i.application_id for i in applications]\n overdue_bills = OverdueBill.select().where(OverdueBill.\n collection_id << app_ids)\n sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]\n bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)\n insert_args = lambad_instalment_result(bill_list, applications)\n if not insert_args:\n continue\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef c1b_dispatch_in_record(**kwargs):\n app_ids = kwargs.get('apps')\n partner_id = kwargs.get('partner_id', 'null')\n bill_dict = kwargs.get('bill_dict')\n period = kwargs.get('period')\n bomber_id = kwargs.get('bomber_id')\n if not all([app_ids, partner_id, bill_dict, period]):\n return False\n bill_dict = {str(k): v for k, v in bill_dict.items()}\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().\n alias('entry_at'), Application.overdue_days.alias(\n 'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.id << app_ids)\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef check_call_history(application):\n app_create_at = application.created_at + timedelta(days=4)\n if datetime.today().date() > app_create_at.date():\n call_actions = CallActions.select().where(CallActions.type == 0, \n CallActions.application == application.id, CallActions.\n created_at > datetime.now() - timedelta(days=5))\n for call in call_actions:\n if call.phone_status == PhoneStatus.CONNECTED.value:\n return True\n return False\n return True\n\n\n@action(MessageAction.BOMBER_SCAVENGER)\ndef scavenger(payload, msg_id):\n scavenger_time = -60\n scavenger = 
SystemConfig.select().where(SystemConfig.key ==\n 'SCAVENGER_TIME').first()\n if scavenger and scavenger.value.isdigit():\n scavenger_time = -int(scavenger.value)\n update_auto_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value, description='scavenger').where(AutoCallList.status ==\n AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime\n .now() + timedelta(minutes=scavenger_time))\n count = update_auto_call_list.execute()\n logging.info('scavenger processed %s application', count)\n mail_box_scavenger_time = -30\n mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'MAIL_BOX_SCAVENGER_TIME').first()\n if mail_box_scavenger and mail_box_scavenger.value.isdigit():\n mail_box_scavenger_time = -int(mail_box_scavenger.value)\n update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.\n value, AutoCallList.updated_at < datetime.now() + timedelta(minutes\n =mail_box_scavenger_time))\n mail_box_count = update_mail_box_call_list.execute()\n logging.info('scavenger update mail box %s', mail_box_count)\n update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value\n ).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.\n updated_at < datetime.now() + timedelta(minutes=-30))\n ivr_result = update_auto_ivr.execute()\n logging.info('scavenger update %s ivr' % ivr_result)\n\n\n<mask token>\n\n\n@action(MessageAction.REPORT_BOMBER_COLLECTION)\ndef report_bomber_collection(payload, msg_id):\n start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)\n ).scalar()\n now = datetime.now()\n if start_date and str(start_date) == str(now)[:10]:\n return\n end_date = str(now + timedelta(days=1))[:10]\n start_date = str(now)[:10]\n dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))\n all_overdue_loan_sql1 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n GROUP BY 1;\n \"\"\"\n s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()\n d1 = OperatedDict(s_data1)\n all_overdue_loan_sql2 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE DATE(ba.follow_up_date) > CURDATE()\n AND ba.called_counts = 0\n GROUP BY 1;\n \"\"\"\n s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()\n d2 = OperatedDict(s_data2)\n overdue_loans_entered_into_predict_call_system_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE ba.called_counts >= 1\n GROUP BY 1;\n \"\"\"\n s_data3 = readonly_db.execute_sql(\n overdue_loans_entered_into_predict_call_system_sql).fetchall()\n d3 = OperatedDict(s_data3)\n loans_completed_sql = \"\"\"\n SELECT ba.cycle, COUNT(DISTINCT ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()\n d4 = OperatedDict(s_data4)\n connected_calls_automatic_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()\n d5 = OperatedDict(s_data5)\n connected_calls_manual_sql = \"\"\"\n SELECT bb.cycle, COUNT(bb.id)\n FROM bomber.bombing_history bb\n WHERE DATE(bb.created_at) = curdate()\n AND (bb.bomber_id < 150 OR bb.bomber_id > 200)\n GROUP BY bb.cycle;\n \"\"\"\n s_data6 = 
readonly_db.execute_sql(connected_calls_manual_sql).fetchall()\n d6 = OperatedDict(s_data6)\n logging.info('Directly get data from database successfully.')\n c1 = d1 - d2\n c2 = d3\n c3 = c2 / c1\n c4 = d4\n c5 = c4 / c2\n c6 = d5\n c7 = c6 / c4\n c8 = d6\n c9 = OperatedDict(get_agent())\n c10 = (c6 + c8) / c9\n try:\n c11 = average_call_duration_team(start_date, end_date)\n except AttributeError:\n c11 = {}\n lst = []\n for i in range(1, 5):\n lst.append({'apply_date': start_date, 'cycle': dct[i],\n 'all_overdue_loan': c1.get(i, 0),\n 'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),\n 'of_overdue_loans_entered_into_predict_call_system': round(c3.\n get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),\n 'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *\n 100, 1), 'connected_calls_automatic': c6.get(i, 0),\n 'connected_calls_automatic_completed_loans': round(c7.get(i, 0),\n 1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i, \n 0), 'average_calls_agent': round(c10.get(i, 0), 1),\n 'average_call_duration_team': round(c11.get(i, 0), 1)})\n ReportCollection.insert_many(lst).execute()\n logging.info('report_bomber_collection:Done!')\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)\ndef bomber_auto_call_list_record(payload, msg_id):\n \"\"\"Keep one year of auto_call_list records: delete the previous day's data and insert today's data.\"\"\"\n now = datetime.now()\n if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):\n date_sql = \"\"\"\n SELECT DATE(created_at) FROM auto_call_list_record\n GROUP BY DATE(created_at) limit 1\n \"\"\"\n del_date = db.execute_sql(date_sql).fetchone()[0]\n del_sql = \"\"\"\n DELETE FROM auto_call_list_record WHERE date(created_at) = %s\n \"\"\"\n db.execute_sql(del_sql, [del_date])\n sql = \"\"\"\n INSERT INTO auto_call_list_record\n SELECT * FROM auto_call_list\n \"\"\"\n db.execute_sql(sql)\n logging.info('bomber_auto_call_list_record done')\n\n\n<mask token>\n\n\ndef new_out_record(**kwargs):\n if not kwargs['application_ids']:\n return\n DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.\n bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.\n application << kwargs['application_ids'], DispatchAppHistory.out_at\n .is_null(True)).execute()\n if kwargs.get('month_dispatch'):\n return\n try:\n Application.update(ptp_bomber=None).where(Application.id << kwargs[\n 'application_ids']).execute()\n except Exception as e:\n logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[\n 'application_ids'], str(e)))\n\n\n<mask token>\n\n\ndef end_old_application(old_app, paid=False):\n if paid:\n if old_app.status == OldLoanStatus.WAITING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return\n if old_app.status == OldLoanStatus.PROCESSING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return old_app.application_id\n end_date = old_app.end_date\n now = datetime.now()\n if now >= max(end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n old_app.save()\n return old_app.application_id\n\n\n<mask token>\n\n\ndef start_old_application(old_app, cancel=False):\n application_id = old_app.application_id\n if cancel and old_app.status == OldLoanStatus.PAID.value:\n now = datetime.now()\n if old_app.start_date is None:\n old_app.status = OldLoanStatus.WAITING.value\n elif now >= max(old_app.end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.\n promised_date or 
now)).where(DispatchAppHistory.bomber_id ==\n old_app.bomber_id, DispatchAppHistory.application ==\n application_id).execute()\n else:\n old_app.status = OldLoanStatus.PROCESSING.value\n DispatchAppHistory.update(out_at=None).where(DispatchAppHistory\n .bomber_id == old_app.bomber_id, DispatchAppHistory.\n application == application_id).execute()\n old_app.save()\n return\n application = Application.get_or_none(Application.id == application_id,\n Application.status != ApplicationStatus.REPAID.value, Application.\n overdue_days > 90, Application.promised_date.is_null(True) | (fn.\n DATE(Application.promised_date) < datetime.today().date()))\n if not application:\n logging.error('Can not set old application %s to start collecting',\n application_id)\n return\n if old_app.status in OldLoanStatus.no_available():\n logging.info('%s has finished or paid', old_app.application_id)\n return\n config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)\n sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)\n old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value\n old_app.status = OldLoanStatus.PROCESSING.value\n old_app.bomber_id = old_app_bomber\n old_app.start_date = datetime.now()\n if not old_app.end_date:\n old_app.end_date = datetime.now() + timedelta(days=sp)\n old_app.save()\n in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,\n application_ids=[old_app.application_id], expected_out_time=str(\n old_app.end_date))\n\n\n<mask token>\n\n\ndef run_member_sql(sql):\n result = [0, 0]\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n sql_result = cursor.fetchone()\n if sql_result:\n result = sql_result\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n return result\n\n\n<mask token>\n\n\ndef get_before_bomber(date_time):\n begin_time = str(date_time - timedelta(days=7))\n end_time = str(date_time)\n old_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at<date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (begin_time, begin_time))\n old_data = run_one_sql(old_sql)\n new_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at> '%s' \n and created_at<'%s'\n and overdue_days=1;\n \"\"\"\n % (begin_time, end_time))\n new_data = run_one_sql(new_sql)\n dpd4_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>date_add('%s',interval 1 day) \n and created_at< date_add('%s',interval 1 day)\n and overdue_days=4;\n \"\"\"\n % (begin_time, end_time))\n dpd4_data = run_one_sql(dpd4_sql)\n dpd2_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at< date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (end_time, end_time))\n dpd2_data = run_one_sql(dpd2_sql)\n all_money = old_data + new_data\n repayment = all_money - dpd4_data - dpd2_data\n pro = 0\n if all_money:\n pro = repayment / all_money * 100\n RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,\n proportion=pro, repayment=repayment)\n\n\n<mask token>\n\n\ndef get_c1a_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n 
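# NOTE: for each is_first_loan flag this block first sums the pending amount (principal + late fee + interest) of applications that entered C1A in the one-day window and creates a RepaymentReportInto row for that day,\n# then re-scans cycle-1 repayments for roughly the previous 19 days of C1A entries and updates each matching daily row with proportion = repayment / all_money * 100.\n# The repayment figure is divided by 1000000 before the ratio is taken, which suggests repayment_log amounts are stored at a finer scale than the bill_java.overdue pending amounts; that scaling is an inference from the code, not documented here.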
end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=19)\n repayment_sql = (\n \"\"\"\n select \n sum(b.principal_part+b.late_fee_part) as paid_amount, \n cdt\n from \n (select \n br.principal_part, br.late_fee_part, \n date(cdt) as cdt, br.repay_at, br.application_id\n from (\n select ba.id, ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n if not repayment:\n return\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n for d in repayment:\n repay = d[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n d[1], RepaymentReportInto.cycle == Cycle.C1A.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = round(repay, 3)\n pro = repay / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_c2_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n not_contain_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_money = run_one_sql(not_contain_sql)\n begin_date = date_time - timedelta(days=37)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n 
from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n not_contain_repay_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_repay = run_all_sql(not_contain_repay_sql)\n if not not_contain_money and repayment:\n return\n for i in ContainOut.values():\n if i == ContainOut.NOT_CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(\n not_contain_money, 3), proportion='0', repayment=0,\n is_first_loan=is_first_loan, contain_out=ContainOut.\n NOT_CONTAIN.value)\n for repay in not_contain_repay:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n elif i == ContainOut.CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(all_money, 3),\n proportion='0', repayment=0, is_first_loan=\n is_first_loan, contain_out=ContainOut.CONTAIN.value)\n for repay in repayment:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\ndef get_c3_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id, ba.c3_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan 
= %s\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=30)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c3_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c3_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = '%s'\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 4\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C3.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n if not repayment:\n return\n for repay in repayment:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n repay[1], RepaymentReportInto.cycle == Cycle.C3.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = repay_money\n pro = 0\n if report.all_money and int(report.all_money):\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_static_bomber(begin_date):\n active_date = begin_date - timedelta(days=8)\n bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),\n BomberR.last_active_at.alias('active')).where(BomberR.\n last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])\n summary = []\n for bomber in bombers:\n summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':\n bomber.role.cycle, 'work_ind': 0})\n SummaryBomber.insert_many(summary).execute()\n\n\n<mask token>\n\n\n@time_logger\ndef get_call_and_made(end_date, begin_date, real_time_query=False):\n call_sql = (\n \"\"\"\n select \n bomber_id, \n count(case when relationship is not null then application_id end) \n as 'call_cnt', \n count(distinct case when relationship is not null then \n application_id end) as 'call_case',\n count(case when phone_status=4 then application_id end) as 'connect',\n count(distinct case when phone_status=4 then application_id end) \n as 'connect_case'\n from (\n select bomber_id,application_id,phone_status, cycle, relationship\n from bomber.call_actions ba\n where created_at>'%s' and created_at<'%s'\n and type in (0, 1)\n ) a\n group by 1\n \"\"\"\n % (begin_date, end_date))\n calls = run_all_sql(call_sql)\n if real_time_query:\n return calls\n for call in calls:\n bomber, call_cnt, case_made, connect_cnt, case_connect = call\n SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,\n call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(\n SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date\n ).execute()\n return calls\n\n\n@time_logger\ndef get_claimed_cnt(end_date, begin_date, real_time_query=False):\n table_date = begin_date - timedelta(days=30)\n claimed_sql = (\n \"\"\"\n SELECT\n COUNT( `t1`.`application_id` ) AS cnt,\n `t1`.`bomber_id` AS bomber_id \n FROM\n 
`dispatch_app_history` AS t1 \n WHERE\n ( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null ) \n AND ( `t1`.`bomber_id` != 1000 ) \n AND ( `t1`.`partner_id` IS null ) \n AND ( `t1`.`entry_at` > '%s' ) \n AND ( `t1`.`entry_at` < '%s' ) \n GROUP BY\n `t1`.`bomber_id`\n \"\"\"\n % (begin_date, table_date, end_date))\n claimeds = run_all_sql(claimed_sql)\n if real_time_query:\n return claimeds\n for claimed in claimeds:\n cnt, bomber_id = claimed\n SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == bomber_id).execute()\n return claimeds\n\n\ndef get_sms_data(end_data, begin_data):\n all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(\n 'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')\n ).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.\n created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())\n ).group_by(ConnectHistoryR.operator)\n for sms in all_sms:\n SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==\n begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()\n return all_sms\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_NEW)\ndef summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_active_bomber(begin_date)\n get_call_and_made(end_date, begin_date)\n get_claimed_cnt(end_date, begin_date)\n get_sms_data(end_date, begin_date)\n get_ptp_data(end_date, begin_date)\n get_recover_amount(end_date, begin_date)\n get_unfollowed(begin_date)\n get_unfollowed_call(begin_date)\n\n\ndef get_new_case_amount(begin_date, end_date):\n all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.\n entry_late_fee_pending + DispatchAppHistoryR.\n entry_principal_pending).alias('pending'), DispatchAppHistoryR.\n bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')\n ).where(DispatchAppHistoryR.entry_at > begin_date, \n DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.\n partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)\n for case in all_case:\n SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt\n =case.cnt).where(SummaryBomber.bomber_id == case.bomber_id, \n SummaryBomber.time == begin_date).execute()\n return all_case\n\n\n<mask token>\n\n\ndef get_kp_today(begin_date, end_date):\n sql = (\n \"\"\"\n select bomber_id, count(distinct application_id)\n from( \n SELECT bomber_id, application_id\n FROM bomber.auto_call_actions a\n WHERE promised_date >= '%s' AND created_at < '%s' \n AND EXISTS(select 1 from bomber.application ba \n where a.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))\n UNION \n SELECT bomber_id, application_id\n FROM bomber.bombing_history b\n WHERE promised_date >= '%s' AND created_at < '%s'\n AND EXISTS(select 1 from bomber.application ba \n where b.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))) result\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, begin_date, begin_date, end_date, begin_date)\n )\n kp_today = run_all_sql(sql)\n for kp in kp_today:\n SummaryBomber.update(KP_today_cnt=kp[1]).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == kp[0]).execute()\n\n\n<mask token>\n\n\ndef get_new_case_cleared(begin_date, end_date):\n sql = (\n 
\"\"\"\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c2_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c2_entry > '%s' \n AND ba.c2_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c3_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c3_entry > '%s' \n AND ba.c3_entry < '%s' \n ) a\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date, end_date))\n case_cleared_sums = run_all_sql(sql)\n for clear in case_cleared_sums:\n SummaryBomber.update(new_case_cleared_sum=clear[1]).where(\n SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==\n clear[0]).execute()\n\n\n@time_logger\ndef get_new_case_call(begin_date, end_date, real_query_time=False):\n sql = (\n \"\"\"\n SELECT\n bd.bomber_id,\n count( DISTINCT bd.application_id )\n FROM\n bomber.dispatch_app_history bd\n INNER JOIN bomber.call_actions bc \n ON bd.application_id = bc.application_id \n AND bd.bomber_id = bc.bomber_id \n AND date( bd.entry_at ) = date( bc.created_at ) \n WHERE\n entry_at > '%s' \n AND entry_at < '%s' \n AND partner_id IS NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n new_case_calls = run_all_sql(sql)\n if real_query_time:\n return new_case_calls\n for call in new_case_calls:\n SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber\n .bomber_id == call[0], SummaryBomber.time == begin_date).execute()\n return new_case_calls\n\n\n@time_logger\ndef get_calltime_avg(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction,\n count( 1 ) AS auto_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' ' \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n AND an.recording is not null\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]][0] += data[1]\n result[data[0]][1] += data[2]\n 
continue\n result[data[0]] = [data[1], data[2]]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=\n value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == key).execute()\n return result\n\n\ndef get_no_calltime_avg(begin_date, end_date):\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND (an.status!='ANSWERED' or an.recording is null) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n for data in manuals:\n SummaryBomber.update(calltime_no_case_sum=data[1],\n calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /\n data[2] if data[2] else 0).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == data[0]).execute()\n\n\n@time_logger\ndef get_calltime_sum(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' '\n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == key).execute()\n return result\n\n\n<mask token>\n\n\n@action(MessageAction.UPDATE_SUMMARY_NEW)\ndef update_summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'UPDATE_SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_new_case_amount(begin_date, end_date)\n get_kp_cleared(begin_date, end_date)\n get_kp_today(begin_date, end_date)\n get_ptp_cnt(begin_date, end_date)\n get_ptp_call_cnt(begin_date, end_date)\n get_new_case_cleared(begin_date, end_date)\n get_new_case_call(begin_date, end_date)\n get_calltime_avg(begin_date, end_date)\n get_no_calltime_avg(begin_date, end_date)\n get_calltime_sum(begin_date, end_date)\n\n\ndef get_cycle_claimed(begin_date, end_date):\n sql = (\n \"\"\"\n select cycle,count(1)\n from bomber.application where cycle in (1,2,3,4)\n and (finished_at is null or (finished_at>'%s'))\n and created_at>'2018-09-01'\n group by 1\n \"\"\"\n % begin_date)\n result = 
run_all_sql(sql)\n return result\n\n\n@time_logger\ndef cycle_new_case(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( ba.id ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.created_at ) = bo.which_day_overdue \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c1b_entry ) = bo.which_day_overdue\n WHERE\n c1b_entry > '%s' \n AND c1b_entry < '%s' \n UNION\n SELECT\n 3 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c2_entry ) = bo.which_day_overdue \n WHERE\n c2_entry > '%s' \n AND c2_entry < '%s' \n UNION\n SELECT\n 4 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c3_entry ) = bo.which_day_overdue\n WHERE\n c3_entry > '%s' \n AND c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n all_datas = run_all_sql(sql)\n if real_time_query:\n return all_datas\n for data in all_datas:\n SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()\n return all_datas\n\n\n@time_logger\ndef get_cycle_new_case_call(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.created_at ) = date( bc.created_at ) \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c1b_entry ) = date( bc.created_at ) \n WHERE\n ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s'\n UNION\n SELECT\n 3 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c2_entry ) = date( bc.created_at ) \n WHERE\n ba.c2_entry > '%s' \n AND ba.c2_entry < '%s'\n UNION\n SELECT\n 4 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c3_entry ) = date( bc.created_at ) \n WHERE\n ba.c3_entry > '%s' \n AND ba.c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n cycle_datas = run_all_sql(sql)\n if real_time_query:\n return cycle_datas\n for data in cycle_datas:\n SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber\n .time == begin_date, SummaryBomber.cycle == data[0], \n SummaryBomber.bomber_id == data[0]).execute()\n return cycle_datas\n\n\ndef get_cycle_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n '1' AS cycle, count( DISTINCT id ), \n sum( 
paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.created_at ) = date( br.repay_at ) \n WHERE ba.created_at > '%s' \n AND ba.created_at < '%s' \n GROUP BY 1, 2 ) a \n UNION\n SELECT \n '2' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n GROUP BY 1, 2) a\n \"\"\"\n % (begin_date, end_date, begin_date, end_date))\n cycle_cleared = run_all_sql(sql)\n for i in cycle_cleared:\n SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber\n .cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.\n time == begin_date).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_NEW_CYCLE)\ndef summary_new_cycle(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW_CYCLE').first()\n if worker_log and worker_log.logs >= 5:\n return\n cycle_datas = SummaryBomber.select(fn.SUM(SummaryBomber.\n new_case_amount_sum).alias('new_case_amount_sum'), fn.SUM(\n SummaryBomber.new_case_cleared_sum).alias('new_case_cleared_sum'),\n fn.SUM(SummaryBomber.case_made_cnt).alias('case_made_cnt'), fn.SUM(\n SummaryBomber.case_connect_cnt).alias('case_connect_cnt'), fn.SUM(\n SummaryBomber.promised_cnt).alias('promised_cnt'), fn.SUM(\n SummaryBomber.promised_amount).alias('promised_amount'), fn.SUM(\n SummaryBomber.cleared_cnt).alias('cleared_cnt'), fn.SUM(\n SummaryBomber.cleared_amount).alias('cleared_amount'), fn.SUM(\n SummaryBomber.new_case_cnt).alias('new_case_cnt'), fn.SUM(\n SummaryBomber.new_case_call_cnt).alias('new_case_call_cnt'), fn.SUM\n (SummaryBomber.unfollowed_cnt).alias('unfollowed_cnt'), fn.SUM(\n SummaryBomber.unfollowed_call_cnt).alias('unfollowed_call_cnt'), fn\n .SUM(SummaryBomber.call_cnt).alias('call_cnt'), fn.SUM(\n SummaryBomber.sms_cnt).alias('sms_cnt'), fn.SUM(SummaryBomber.\n call_connect_cnt).alias('call_connect_cnt'), fn.SUM(SummaryBomber.\n ptp_today_cnt).alias('ptp_today_cnt'), fn.SUM(SummaryBomber.\n ptp_today_call_cnt).alias('ptp_today_call_cnt'), fn.SUM(\n SummaryBomber.ptp_next_cnt).alias('ptp_next_cnt'), fn.SUM(\n SummaryBomber.ptp_next_call_cnt).alias('ptp_next_call_cnt'), fn.SUM\n (SummaryBomber.KP_cleared_cnt).alias('KP_cleared_cnt'), fn.SUM(\n SummaryBomber.KP_today_cnt).alias('KP_today_cnt'), fn.SUM(\n SummaryBomber.work_ind).alias('work_ind'), fn.SUM(SummaryBomber.\n calltime_sum).alias('calltime_sum'), fn.SUM(SummaryBomber.\n calltime_case_sum).alias('calltime_case_sum'), fn.SUM(SummaryBomber\n .calltime_case_cnt).alias('calltime_case_cnt'), fn.SUM(\n SummaryBomber.calltime_no_case_sum).alias('calltime_no_case_sum'),\n fn.SUM(SummaryBomber.calltime_no_case_cnt).alias(\n 'calltime_no_case_cnt'), SummaryBomber.cycle.alias('cycle')).where(\n SummaryBomber.time == begin_date, SummaryBomber.cycle << Cycle.values()\n ).group_by(SummaryBomber.cycle)\n for cycle_data in cycle_datas:\n SummaryBomber.create(bomber_id=cycle_data.cycle, time=begin_date,\n cycle=cycle_data.cycle, 
new_case_amount_sum=cycle_data.\n new_case_amount_sum, new_case_cleared_sum=cycle_data.\n new_case_cleared_sum, new_case_cleard_rate=0, case_made_cnt=\n cycle_data.case_made_cnt, case_made_rate=0, case_connect_cnt=\n cycle_data.case_connect_cnt, case_connect_rate=0, promised_cnt=\n cycle_data.promised_cnt, promised_amount=cycle_data.\n promised_amount, cleared_cnt=cycle_data.cleared_cnt,\n cleared_amount=cycle_data.cleared_amount, new_case_cnt=\n cycle_data.new_case_cnt, new_case_call_cnt=cycle_data.\n new_case_call_cnt, unfollowed_cnt=cycle_data.unfollowed_cnt,\n unfollowed_call_cnt=cycle_data.unfollowed_call_cnt, call_cnt=\n cycle_data.call_cnt, sms_cnt=cycle_data.sms_cnt,\n call_connect_cnt=cycle_data.call_connect_cnt, calltime_case_avg\n =0, ptp_today_cnt=cycle_data.ptp_today_cnt, ptp_today_call_cnt=\n cycle_data.ptp_today_call_cnt, ptp_next_cnt=cycle_data.\n ptp_next_cnt, ptp_next_call_cnt=cycle_data.ptp_next_call_cnt,\n KP_cleared_cnt=cycle_data.KP_cleared_cnt, KP_today_cnt=\n cycle_data.KP_today_cnt, KP_cleared_rate=0, work_ind=cycle_data\n .work_ind, calltime_sum=cycle_data.calltime_sum,\n calltime_case_sum=cycle_data.calltime_case_sum,\n calltime_case_cnt=cycle_data.calltime_case_cnt,\n calltime_no_case_sum=cycle_data.calltime_no_case_sum,\n calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,\n work_time_sum=cycle_data.work_time_sum)\n cycle_claimed = get_cycle_claimed(begin_date, end_date)\n for claimed in cycle_claimed:\n SummaryBomber.update(claimed_cnt=claimed[1]).where(SummaryBomber.\n time == begin_date, SummaryBomber.cycle == claimed[0], \n SummaryBomber.bomber_id == claimed[0]).execute()\n cycle_new_case(begin_date, end_date)\n get_cycle_new_case_call(begin_date, end_date)\n get_cycle_new_case_cleared(begin_date, end_date)\n get_cycle_case_made_cnt(begin_date, end_date)\n all_datas = SummaryBomber.filter(SummaryBomber.time == begin_date)\n for data in all_datas:\n cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum if\n data.new_case_amount_sum else 0) * 100\n data.new_case_cleard_rate = cl_rat\n case_made_rate = (data.case_made_cnt / data.claimed_cnt if data.\n claimed_cnt else 0) * 100\n data.case_made_rate = case_made_rate\n case_connect_rate = (data.case_connect_cnt / data.case_made_cnt if\n data.case_made_cnt else 0) * 100\n data.case_connect_rate = case_connect_rate\n calltime_case_avg = (data.calltime_case_sum / data.\n calltime_case_cnt if data.calltime_case_cnt else 0)\n data.calltime_case_avg = calltime_case_avg\n calltime_no_case_avg = (data.calltime_no_case_sum / data.\n calltime_no_case_cnt if data.calltime_no_case_cnt else 0)\n data.calltime_no_case_avg = calltime_no_case_avg\n KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt if data.\n KP_today_cnt else 0) * 100\n data.KP_cleared_rate = KP_cleared_rate\n data.save()\n\n\n<mask token>\n\n\ndef get_change_bomber():\n cycle_role_map = {(5): Cycle.C1B.value, (6): Cycle.C2.value, (8): Cycle\n .C3.value}\n result = {}\n bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,\n BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=\n BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.\n created_at) == date.today(), BomberLog.role_id << list(\n cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.\n instalment == 0).dicts()\n for b_log in bomber_logs:\n cycle = cycle_role_map.get(b_log['role_id'])\n group_id = b_log['group_id']\n if cycle in result:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],\n 
'new_ids': []}\n else:\n result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}}\n if b_log['operation'] == 0:\n result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])\n if result:\n bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map\n .keys())), Bomber.is_del == 0, Bomber.instalment == 0)\n for b in bombers:\n cycle_result = result.get(cycle_role_map[b.role_id], {})\n role_result = cycle_result.get(b.group_id)\n if not role_result:\n continue\n role_result['new_ids'].append(b.id)\n resutl_list = []\n for cycle, group_dict in result.items():\n resutl_list.extend(list(group_dict.values()))\n return resutl_list\n return []\n\n\n<mask token>\n\n\ndef get_surplus_application(new_ids, del_ids, average_nums, classified_apps):\n surplus_apps = []\n for del_id in del_ids:\n del_res = classified_apps.get(del_id, {})\n p_list = del_res.get('p_list', [])\n np_list = del_res.get('np_list', [])\n del_res['need_num'] = -(len(p_list) + len(np_list))\n del_res['to_list'] = np_list + p_list\n surplus_apps.extend(p_list)\n surplus_apps.extend(np_list)\n for index, bid in enumerate(new_ids):\n average = average_nums[index]\n bomber_app = classified_apps.get(bid)\n if not bomber_app:\n bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid\n ).first()\n bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':\n [], 'np_num': 0, 'need_num': average, 'partner_id': bomber.\n partner_id if bomber else ''}\n classified_apps[bid] = bomber_app\n else:\n p_num = len(bomber_app['p_list'])\n np_num = len(bomber_app['np_list'])\n if p_num > average:\n bomber_app['need_num'] = -np_num\n else:\n bomber_app['need_num'] = average - (p_num + np_num)\n bomber_app['p_num'] = p_num\n bomber_app['np_num'] = np_num\n if bomber_app['need_num'] < 0:\n random.shuffle(bomber_app['np_list'])\n res_over = bomber_app['np_list'][:-bomber_app['need_num']]\n bomber_app['to_list'] = res_over\n surplus_apps.extend(res_over)\n classified_apps_list = sorted(classified_apps.values(), key=lambda x: x\n ['need_num'], reverse=True)\n return surplus_apps, classified_apps_list\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)\ndef bomber_dispatch_applications(payload, msg_id):\n change_bombers = get_change_bomber()\n instalment_change_bombers = get_instalment_change_bomber()\n params = {ApplicationType.CASH_LOAN.value: change_bombers,\n ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}\n for type, bombers in params.items():\n change_bomber_dispatch_apps(change_bombers=bombers, type=type)\n\n\n@action(MessageAction.REPAIR_BOMBER)\ndef repair_bomber(payload, msg_id):\n app_mobile = payload['app_mobile']\n username = payload.get('user_name')\n logging.info('start repair bomber, number: %s' % app_mobile)\n if 'mobile_no' in payload and payload['mobile_no']:\n mobile = number_strip(str(payload['mobile_no']))[:64]\n name = payload.get('mobile_name')\n application = Application.filter(Application.user_mobile_no == mobile)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, mobile, username, name)\n if 'tel_no' in payload and payload['tel_no']:\n tel_no = number_strip(str(payload['tel_no']))[:64]\n name = payload.get('tel_name')\n application = Application.filter(Application.user_mobile_no == tel_no)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, tel_no, username, name)\n\n\n<mask token>\n\n\ndef get_summary_daily_time():\n mid_time_t1 = 
datetime.strptime('12:40:00', '%H:%M:%S')\n mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')\n now_date = datetime.now()\n now_date_time = now_date.time()\n today_str = str(now_date.date())\n if now_date_time < mid_time_t1.time():\n yes_date = now_date - timedelta(days=1)\n yes_date_str = str(yes_date.date())\n begin_str = yes_date_str + ' 17:20:00'\n end_str = today_str + ' 00:00:00'\n elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():\n begin_str = today_str + ' 00:00:00'\n end_str = today_str + ' 12:40:00'\n else:\n begin_str = today_str + ' 12:40:00'\n end_str = today_str + ' 17:20:00'\n begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')\n end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')\n summary_datetime = now_date - timedelta(minutes=30)\n summary_date = summary_datetime.date()\n return begin_time, end_time, summary_date\n\n\n@action(MessageAction.SUMMARY_DAILY)\ndef summary_daily_data(payload, msg_id):\n begin_time, end_time, summary_date = get_summary_daily_time()\n call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.\n bomber_id, CallActionsR.application_id, CallActionsR.promised_date,\n CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(\n CallActionsR.created_at >= begin_time, CallActionsR.created_at <\n end_time, CallActionsR.type << (0, 1))\n summary_dailys = {}\n for call in call_actions:\n if call.bomber_id not in summary_dailys:\n summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.\n bomber_id, 'summary_date': str(summary_date)}\n if call.name and call.number:\n summary_dailys[call.bomber_id]['call_cnt'] += 1\n if call.promised_date:\n summary_dailys[call.bomber_id]['ptp_cnt'] += 1\n C1_sql = (\n \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,a.cycle\n from \n (select a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,2,4,5)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\"\n % (begin_time, end_time))\n C1_repayment = run_all_sql(C1_sql)\n other_sql = (\n \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,cycle\n from (\n select application_id,current_bomber_id,pay_amount,repay_at,cycle\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at,\n br.cycle\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (3,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n group by 1,4) b\n group by 1\n \"\"\"\n % (begin_time, end_time))\n other_repayment = run_all_sql(other_sql)\n all_repayment = C1_repayment + other_repayment\n for res in all_repayment:\n bomber_id, pay_amount, cycle = res\n if bomber_id in summary_dailys:\n summary_dailys[bomber_id]['repayment'] += pay_amount\n else:\n summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': cycle, 'repayment': 
pay_amount, 'bomber_id':\n bomber_id, 'summary_date': str(summary_date)}\n insert_values = list(summary_dailys.values())\n if insert_values:\n SummaryDaily.insert_many(insert_values).execute()\n\n\n<mask token>\n\n\ndef get_app_logs(apps):\n app_logs = {}\n all_np_apps = []\n all_p_apps = []\n for a in apps:\n latest_bomber = a['latest_bomber']\n latest_bomber = a['cycle'] if not latest_bomber else latest_bomber\n if latest_bomber in app_logs:\n app_logs[latest_bomber]['to_ids'].append(a['id'])\n else:\n app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':\n [a['id']], 'np_ids': [], 'p_ids': []}\n if a['promised_date'] and a['promised_date'].date() >= datetime.now(\n ).date():\n app_logs[latest_bomber]['p_ids'].append(a['id'])\n all_p_apps.append(a)\n else:\n app_logs[latest_bomber]['np_ids'].append(a['id'])\n all_np_apps.append(a)\n return app_logs, all_np_apps, all_p_apps\n\n\ndef month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):\n apps = list(apps)\n np_apps = list(np_apps)\n random.shuffle(np_apps)\n apps_len = len(apps)\n np_apps_len = len(np_apps)\n end = 0\n all_app_precentage = 0\n partners = Partner.select().where(Partner.cycle == cycle, Partner.\n status == PartnerStatus.NORMAL.value)\n for p in partners:\n all_app_precentage += p.app_percentage\n for partner in partners:\n bombers = Bomber.select().where(Bomber.partner == partner.id, \n Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.\n value)\n bids = {b.id: b for b in bombers}\n if len(bids) == 0:\n logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))\n continue\n start = end\n if np_apps_len >= int(apps_len * all_app_precentage):\n end = start + int(apps_len * partner.app_percentage)\n else:\n end = start + int(np_apps_len * partner.app_percentage /\n all_app_precentage)\n partner_app = np_apps[start:end]\n dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)\n np_apps = np_apps[end:]\n return np_apps\n\n\ndef month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):\n sys_cycle = {(1): 'AB_TEST_C1A', (2): 'AB_TEST_C1B', (3): 'AB_TEST_C2',\n (4): 'AB_TEST_C3'}\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0\n )\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n bombers = bombers.where(Bomber.instalment == 0)\n bids = {b.id: b for b in bombers}\n if cycle == Cycle.C1A.value:\n np_ids = [a['id'] for a in np_apps]\n np = Application.update(status=ApplicationStatus.PROCESSING.value,\n ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids\n ).execute()\n bomber_app_logs = app_logs.get(cycle, {})\n out_param = {'application_ids': bomber_app_logs.get('to_ids', []),\n 'month_dispatch': 1, 'src_bomber_id': cycle}\n new_out_record(**out_param)\n in_param = {'cycle': cycle, 'application_ids': np_ids,\n 'dest_bomber_id': cycle}\n new_in_record(**in_param)\n bomber_app_logs['need_num'] = len(np_apps)\n bomber_app_logs['form_ids'] = np_ids\n bomber_app_logs['status'] = 1\n else:\n dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)\n dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)\n\n\ndef dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,\n type=ApplicationType.CASH_LOAN.value):\n apps = list(apps)\n random.shuffle(apps)\n bids_list = list(bids.keys())\n if len(bids_list) <= 0:\n logging.info('get_dispatch_app_to_bomber no bids')\n return\n average_num = 
get_average_number(len(apps), len(bids_list))\n bomber_end = 0\n with db.atomic():\n for index, bid in enumerate(bids_list):\n current_bomber = bids.get(bid)\n bomber_app_logs = app_logs.get(bid, {})\n bomber_start = bomber_end\n bomber_end = bomber_start + average_num[index]\n bomber_apps = apps[bomber_start:bomber_end]\n from_p, from_np, from_ids, status = [], [], [], 0\n for ba in bomber_apps:\n promised_date = ba.get('promised_date')\n from_ids.append(ba['id'])\n if promised_date and promised_date.date() >= date.today():\n from_p.append(ba['id'])\n else:\n from_np.append(ba['id'])\n app_status = ApplicationStatus.AB_TEST.value\n if (cycle == Cycle.C1A.value and not out_partner and type ==\n ApplicationType.CASH_LOAN.value):\n app_status = ApplicationStatus.PROCESSING.value\n if from_p:\n p = Application.update(ptp_bomber=bid, latest_bomber=bid,\n status=app_status).where(Application.id << from_p).execute(\n )\n p_ids = bomber_app_logs.get('p_ids', []) + from_p\n bomber_app_logs['p_ids'] = p_ids\n if from_np:\n np = Application.update(latest_bomber=bid, ptp_bomber=None,\n status=ApplicationStatus.AB_TEST.value).where(\n Application.id << from_np).execute()\n np_ids = bomber_app_logs.get('np_ids', []) + from_np\n bomber_app_logs['np_ids'] = np_ids\n in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.\n partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}\n if type == ApplicationType.CASH_LOAN.value:\n out_param = {'src_bomber_id': bid, 'application_ids':\n bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}\n new_out_record(**out_param)\n new_in_record(**in_param)\n else:\n out_and_in_record_instalment(**in_param)\n bomber_app_logs['status'] = 1\n need_num = bomber_app_logs.get('need_num', 0) + average_num[index]\n bomber_app_logs['need_num'] = need_num\n all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids\n bomber_app_logs['form_ids'] = all_form_ids\n if not out_partner:\n continue\n try:\n DispatchApp.delete().where(DispatchApp.application.in_(\n from_ids)).execute()\n dispatch_ins = [{'application': id, 'partner':\n current_bomber.partner_id, 'bomber': bid, 'status':\n DisAppStatus.NORMAL.value} for id in from_ids]\n DispatchApp.insert_many(dispatch_ins).execute()\n except Exception as e:\n logging.info(\n 'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str\n (e), bid, from_ids))\n\n\n<mask token>\n\n\ndef calc_entry_time(overdue_days):\n app_entry_time = {}\n overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],\n 'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}\n for key, value in overdue_entry.items():\n if value[0] <= overdue_days <= value[1]:\n app_entry_time[key] = datetime.now()\n else:\n app_entry_time[key] = None\n return app_entry_time\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_BOMBER_OVERDUE)\ndef summary_bomber_overdue_everyday(payload, msg_id):\n cycle_list = Cycle.values()\n which_day = date.today()\n for cycle in cycle_list:\n apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,\n ApplicationR.ptp_bomber, ApplicationR.overdue_days,\n ApplicationR.promised_date, ApplicationR.follow_up_date,\n ApplicationR.external_id, OverdueBillR.status, OverdueBillR.\n periods, OverdueBillR.sub_bill_id).join(OverdueBillR,\n JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id\n ).where(ApplicationR.status != ApplicationStatus.REPAID.value, \n ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()\n bomber_overdue_list = []\n for app in apps:\n status = app.get('status')\n if 
status == ApplicationStatus.REPAID.value:\n continue\n ptp_bomber = app.get('ptp_bomber')\n promised_date = app.get('promised_date')\n follow_up_date = app.get('follow_up_date')\n if not promised_date or promised_date.date() < date.today():\n ptp_bomber = promised_date = None\n if not follow_up_date or follow_up_date.date() < date.today():\n follow_up_date = None\n overdue_dict = {'collection_id': app.get('id'), 'external_id':\n app.get('external_id'), 'sub_bill_id': app.get(\n 'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app\n .get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':\n ptp_bomber, 'promised_date': promised_date,\n 'follow_up_date': follow_up_date, 'which_day': which_day,\n 'overdue_days': app.get('overdue_days')}\n bomber_overdue_list.append(overdue_dict)\n try:\n if bomber_overdue_list:\n with db.atomic():\n for index in range(0, len(bomber_overdue_list), 1000):\n insert_list = bomber_overdue_list[index:index + 1000]\n BomberOverdue.insert_many(insert_list).execute()\n except Exception as e:\n logging.info(\n 'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %\n (cycle, str(which_day), str(e)))\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)\ndef today_ptp_auto_call_switch(payload, msg_id):\n today = datetime.today().date()\n next_day = today + timedelta(days=1)\n apps = ApplicationR.select(ApplicationR.latest_bomber).where(\n ApplicationR.promised_date < next_day, ApplicationR.promised_date >=\n today, ApplicationR.promised_date.is_null(False), ApplicationR.\n status != ApplicationStatus.REPAID.value, ApplicationR.cycle <\n Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(\n ApplicationR.latest_bomber)\n bids = [a.latest_bomber_id for a in apps]\n if not bids:\n return\n q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(\n BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids\n ).execute()\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)\ndef update_today_switch_every_day(payload, msg_id):\n q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(\n BomberPtp.auto_ext.is_null(False)).execute()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\n@action(MessageAction.BOMBER_HEALTH_CHECK)\ndef health_check(payload, msg_id):\n pass\n\n\n<mask token>\n\n\ndef dpd1_process(lst):\n \"\"\"已废弃的方法\"\"\"\n if not lst:\n return\n for key, l in lst.items():\n rule = getattr(BeforeInBomber, key).value\n query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)\n ).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(\n rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.\n call_success()))\n success_set = {i.loanid for i in query}\n failed_list = list(set(l) - success_set)\n post_params = {'$and': rule.get('$and'), 'app_list': failed_list}\n resp = Hyperloop().post('/bomber/score/verify', json=post_params)\n if not resp.ok:\n logging.error('hyperloop score verification failed: %s, %s',\n str(resp.status_code), str(resp.text))\n logging.error('hyperloop score verification failed: %s', str(\n post_params))\n continue\n logging.debug('hyperloop score verification success: %s', resp.content)\n resp_json = resp.json()\n app_list = resp_json['data']\n if not app_list:\n continue\n for item in app_list:\n if random.randint(0, 5) == 1:\n send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':\n int(item)})\n\n\n@action(MessageAction.GET_IVR)\ndef get_ivr(payload, msg_id):\n logging.warning('start get_ivr')\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'DPD1-3_INTO_IVR').first()\n now = date.today()\n if sys_config and sys_config.value:\n start = now - timedelta(days=3)\n else:\n start = now\n end = now + timedelta(days=4)\n item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()\n if not item:\n AutoIVR.delete().execute()\n current_page = 0\n elif item.current_page >= item.total_page:\n return\n else:\n current_page = item.current_page\n auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,\n 'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,\n 'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':\n 10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,\n 'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,\n 'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,\n 'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,\n 'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,\n 'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,\n 'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,\n 'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,\n 'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,\n 'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,\n 'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':\n 101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,\n 'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,\n 'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,\n 'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}\n current_page += 1\n with db.atomic() as transaction:\n while True:\n bill_service = BillService()\n ivr_action = bill_service.ivr_pages(page=current_page,\n page_size=500, start_time=utc_datetime(str(start)),\n end_time=utc_datetime(str(end)))\n result = ivr_action['result']\n page_size = int(ivr_action.get('page_size', 0))\n total_page = int(ivr_action.get('total_page', 0))\n insert_args = []\n for a in result:\n due_at = no_utc_datetime(a['due_at'])\n days = (due_at.date() - now).days\n if days == 2:\n continue\n if days > 0:\n time = str(days)\n else:\n time = str(days).replace('-', 
'PDP')\n key = a['app_name'] + str(a['su']) + time\n group = auto_ivr.get(key)\n user_id = a['user_id']\n try:\n user_resp = AccountService().get_user(path_params={\n 'user_id': user_id})\n if str(user_resp['mobile_no']) == str(a['user_mobile_no']):\n numbers = a['user_mobile_no']\n else:\n numbers = a['user_mobile_no'] + ',' + user_resp.get(\n 'mobile_no')\n except:\n logging.error('Request Account Service Error.')\n numbers = a['user_mobile_no']\n insert_args.append({'application_id': a['id'], 'numbers':\n numbers, 'group': group, 'user_id': user_id})\n AutoIVR.insert_many(insert_args).execute()\n if current_page == 1:\n IVRActionLog.create(total_page=total_page, proc_date=now,\n page_size=page_size, current_page=current_page)\n item = IVRActionLog.get(IVRActionLog.proc_date == now)\n else:\n item.current_page = current_page\n item.page_size = page_size\n item.total_page = total_page\n item.save()\n transaction.commit()\n current_page += 1\n if current_page > int(total_page):\n break\n if sys_config and sys_config.value:\n try:\n classfiy_dpd_ptp_apps()\n except Exception as e:\n logging.error('dpd1-3_test_error:%s' % str(e))\n\n\ndef ivr_t2_test():\n t2_groups = [39, 40, 41, 42, 43, 44]\n ivr_test_proportion = 0.2\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'IVR_TEST_PROPORTION').first()\n if sys_config and sys_config.value:\n ivr_test_proportion = float(sys_config.value)\n t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.\n status == AutoIVRStatus.AVAILABLE.value)\n t2_dict = defaultdict(list)\n for ivr in t2_ivrs:\n t2_dict[ivr.group].append(ivr.id)\n test_ivr_ids = []\n for group, ivr_ids in t2_dict.items():\n number = ceil(len(ivr_ids) * ivr_test_proportion)\n test_ivr_ids += ivr_ids[:number]\n if not test_ivr_ids:\n return\n q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.\n group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.APP_MERGE)\n@deprecated(version='1.0', reason='This function will be removed soon')\ndef app_merge(payload, msg_id):\n sql = \"\"\"\n select *\n from (\n select a.id as id\n from dashboard.application as a\n inner join repayment.bill2 as b on b.external_id = a.id\n where not exists (\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.app = 'DanaCepat'\n and a.is_first_loan = 1\n and a.apply_at < '2018-08-23 20:50:00'\n and b.overdue_days between 1 and 3\n and b.status != 2) result\n where not exists (\n select 1\n from bomber.application as a\n where a.cycle = 1\n and a.status = 4\n and a.id = result.id\n )\n \"\"\"\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n new_data = cursor.fetchall()\n cursor.close()\n if new_data:\n bomber = [103, 104]\n for d in new_data:\n app_id = {'id': d[0]}\n application_overdue(app_id, None)\n Application.update(status=ApplicationStatus.AB_TEST.value,\n latest_bomber=random.choice(bomber), ptp_bomber=None).where(\n Application.id == d[0]).execute()\n logging.warning('add new app success')\n ptp = date.today() - timedelta(days=1)\n del_sql = (\n \"\"\"\n select a.id\n from bomber.application as a\n where exists(\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.cycle = 1\n and a.status = 4\n and (a.promised_date is null or a.promised_date < \"%s\")\n \"\"\"\n % ptp)\n cursor = readonly_db.get_cursor()\n cursor.execute(del_sql)\n del_date = 
cursor.fetchall()\n cursor.close()\n if del_date:\n return\n ids = list()\n for d in del_date:\n ids.append(d[0])\n Application.update(status=ApplicationStatus.UNCLAIMED.value,\n latest_bomber=None).where(Application.id << ids).execute()\n\n\n@action(MessageAction.APPLICATION_BOMBER)\ndef application_overdue(payload, msg_id):\n application_id = payload['id']\n sub_bill_id = payload['bill_sub_id']\n local_app = Application.select().where(Application.external_id ==\n application_id).order_by(Application.finished_at).first()\n if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:\n logging.info('application %s overdue, already exists', application_id)\n add_contact(local_app)\n return\n if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:\n overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==\n sub_bill_id, OverdueBillR.external_id == application_id)\n if overdue_bill.exists():\n logging.info(\n 'application %s,sub_bill_id %s overdue, already exists' % (\n application_id, sub_bill_id))\n return\n try:\n sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])\n sub_bill = sub_bill[0]\n except Exception:\n logging.error(\n 'application %s overdue, get sub_bill info failed:Request To repayment Error'\n , application_id)\n return\n if sub_bill['status'] == 2:\n logging.error('application %s overdue, but bills already cleared',\n application_id)\n return\n overdue_days = sub_bill.get('overdue_days', 0)\n if overdue_days == 0:\n logging.info('application {} no overdue'.format(str(application_id)))\n return\n gold_eye = GoldenEye().get('/applications/%s' % application_id)\n if not gold_eye.ok:\n logging.error('get application %s failed: Request to GoldenEye.',\n application_id)\n return\n gold_app = gold_eye.json().get('data')\n user_id = gold_app['user_id']\n apply_history = Dashboard().get('/users/%s/apply-history' % user_id)\n if not apply_history.ok:\n logging.error(\n 'get user %s apply history failed: Request to Dashboard Failed.',\n user_id)\n return\n history = apply_history.json().get('data')\n loan_success_times = len([(1) for i in history if i['status'] in [80, \n 90, 100, 70] and i['id'] != gold_app['id']])\n id = application_id\n type = ApplicationType.CASH_LOAN.value\n bill_id = sub_bill.get('bill_id')\n amount = sub_bill.get('amount')\n amount_net = sub_bill.get('amount_net')\n interest_rate = sub_bill.get('interest_rate')\n overdue_days = sub_bill.get('overdue_days')\n origin_due_at = sub_bill.get('origin_due_at')\n sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,\n 'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),\n 'overdue_days': overdue_days, 'origin_due_at': origin_due_at,\n 'amount': amount, 'amount_net': amount_net, 'interest_rate':\n interest_rate, 'external_id': application_id}\n if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:\n if local_app and local_app.status != ApplicationStatus.REPAID.value:\n sub_overdue_bill['collection_id'] = local_app.id\n local_app.amount += amount\n local_app.amount_net += amount_net\n local_app.save()\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('application %s,sub_bill_id:%s overdue created' %\n (application_id, sub_bill_id))\n return\n else:\n id = idg()\n type = ApplicationType.CASH_LOAN_STAGING.value\n sub_overdue_bill['collection_id'] = id\n ptp_info = BombingHistory.filter(BombingHistory.application == id).first()\n promised_amount = ptp_info and ptp_info.promised_amount\n promised_date = ptp_info 
and ptp_info.promised_date\n application = Application.create(id=id, user_id=gold_app['user_id'],\n user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[\n 'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],\n contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(\n 'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=\n birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(\n gold_app.get('id_ektp')), profile_province=(gold_app.get(\n 'profile_province') or {}).get('name'), profile_city=(gold_app.get(\n 'profile_city') or {}).get('name'), profile_district=(gold_app.get(\n 'profile_district') or {}).get('name'), profile_residence_time=\n gold_app.get('profile_residence_time'), profile_residence_type=\n gold_app.get('profile_residence_type'), profile_address=gold_app.\n get('profile_address'), profile_education=gold_app.get(\n 'profile_education'), profile_college=(gold_app.get(\n 'profile_college') or {}).get('name'), job_name=gold_app.get(\n 'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get\n ('job_bpjs'), job_user_email=gold_app.get('job_user_email'),\n job_type=gold_app.get('job_type'), job_industry=gold_app.get(\n 'job_industry'), job_department=gold_app.get('job_department'),\n job_province=(gold_app.get('job_province') or {}).get('name'),\n job_city=(gold_app.get('job_city') or {}).get('name'), job_district\n =(gold_app.get('job_district') or {}).get('name'), job_address=\n gold_app.get('job_address'), amount=amount, amount_net=amount_net,\n interest_rate=interest_rate, term=gold_app.get('term'),\n origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=\n sub_bill.get('repay_at'), loan_success_times=loan_success_times,\n arrived_at=datetime.now(), follow_up_date=datetime.now(),\n promised_amount=promised_amount, promised_date=promised_date,\n external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=\n datetime.now())\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('overdue application %s created', application_id)\n Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,\n status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)\n add_contact(application)\n\n\ndef add_contact(application):\n logging.info('start add contact for application: %s', application.id)\n contacts = Contact.filter(Contact.user_id == application.user_id)\n existing_numbers = {contact.number for contact in contacts}\n insert_contacts = list()\n mon_insert_contact = {}\n user_mobile_no = number_strip(application.user_mobile_no)\n if user_mobile_no and user_mobile_no not in existing_numbers:\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': user_mobile_no, 'relationship':\n Relationship.APPLICANT.value, 'source': 'apply info',\n 'real_relationship': Relationship.APPLICANT.value})\n existing_numbers.add(number_strip(application.user_mobile_no))\n extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id\n )\n if not extra_phone.ok:\n extra_phone = []\n logging.error('get user %s extra contacts failed', application.user_id)\n else:\n extra_phone = extra_phone.json()['data']\n if extra_phone:\n for i in extra_phone:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': number, 'relationship':\n Relationship.APPLICANT.value, 'source': 'extra phone',\n 
'real_relationship': Relationship.APPLICANT.value})\n key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n ec_contact = []\n contact = json.loads(application.contact or '[]')\n for i in contact:\n if number_strip(i['mobile_no']\n ) not in existing_numbers and number_strip(i['mobile_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['mobile_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['mobile_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['mobile_no']))\n if i['type'] != 1:\n continue\n if number_strip(i['tel_no']) not in existing_numbers and number_strip(i\n ['tel_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['tel_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['tel_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['tel_no']))\n if ec_contact:\n Contact.insert_many(ec_contact).execute()\n if all((application.job_tel, number_strip(application.job_tel), \n number_strip(application.job_tel) not in existing_numbers)):\n insert_contacts.append({'user_id': application.user_id, 'name':\n None, 'number': number_strip(application.job_tel),\n 'relationship': Relationship.COMPANY.value, 'source':\n 'basic info job_tel', 'real_relationship': Relationship.COMPANY\n .value})\n key = user_mobile_no, number_strip(application.job_tel\n ), ContactType.C_BASIC_INFO_JOB_TEL.value\n mon_insert_contact[key] = 1, 0, None\n existing_numbers.add(number_strip(application.job_tel))\n sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %\n application.external_id)\n if not sms_contacts.ok:\n sms_contacts = []\n logging.info('get user %s sms contacts failed', application.external_id\n )\n else:\n sms_contacts = sms_contacts.json()['data']\n if sms_contacts:\n for i in sms_contacts:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.SUGGESTED.value, 'source': 'sms contacts',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value\n mon_insert_contact[key] = 1, 0, i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n cf = GoldenEye().get('/applications/%s/call/frequency' % application.\n external_id)\n if not cf.ok:\n call_frequency = []\n logging.error('get application %s call frequency error',\n application.external_id)\n else:\n call_frequency = cf.json()['data']\n insert_contacts = []\n fm = GoldenEye().get('/applications/%s/contact/family-member' %\n application.external_id)\n if not fm.ok:\n family = []\n logging.error('get application %s family-member info error',\n application.external_id)\n else:\n family = fm.json()['data']\n if family:\n for i in family:\n if not i.get('number'):\n 
logging.info('family member %s' % str(i))\n continue\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n logging.info('family members: %s' % str(i))\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.FAMILY.value, 'source': FamilyContactType.\n CALLEC.value, 'total_count': i.get('total_count', 1),\n 'total_duration': i.get('total_duration', 0),\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number, ContactType.F_CALL_EC.value\n mon_insert_contact[key] = i.get('total_count', 1), i.get(\n 'total_duration', 0), i['name'][:128]\n existing_numbers.add(number)\n mon_update_contact = {}\n if call_frequency:\n with db.atomic():\n count = 1\n for i in call_frequency:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n Contact.update(total_count=i['total_count'],\n total_duration=i['total_duration']).where(Contact.\n number == number, Contact.user_id == application.\n user_id)\n key = user_mobile_no, number\n mon_update_contact[key] = i['total_count'], i[\n 'total_duration']\n continue\n if count < 6:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.FAMILY.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': FamilyContactType.\n CALLTOP5.value, 'real_relationship': Relationship.\n FAMILY.value})\n count += 1\n key = user_mobile_no, number, ContactType.F_CALL_TOP5.value\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n else:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.SUGGESTED.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': 'call frequency',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = (user_mobile_no, number, ContactType.\n S_CALL_FREQUENCY.value)\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n next_apply_list = AccountService().add_contact(application.user_id)\n for next_apply in next_apply_list:\n number = number_strip(str(next_apply))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n SUGGESTED.value, source='online profile phone',\n real_relationship=Relationship.SUGGESTED.value)\n key = (user_mobile_no, number, ContactType.\n S_ONLINE_PROFILE_PHONE.value)\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %\n application.user_id)\n if not next_applicant.ok:\n next_applicant = []\n logging.error('get user %s dual_contact contacts failed' %\n application.user_id)\n else:\n next_applicant = next_applicant.json()['data']\n if next_applicant:\n for i in next_applicant:\n number = number_strip(str(i))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=\n application.user_name, number=number, relationship=\n Relationship.APPLICANT.value, source='apply info',\n real_relationship=Relationship.APPLICANT.value)\n key = user_mobile_no, number, 
ContactType.A_APPLY_INFO.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n numbers = []\n try:\n numbers = AccountService().ktp_number(path_params={'user_id':\n application.user_id})\n except Exception as e:\n logging.info('request ktp numbers failed %s' % str(e))\n for n in numbers:\n number = number_strip(str(n))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n APPLICANT.value, source='ktp number', real_relationship=\n Relationship.APPLICANT.value)\n key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n try:\n ecs = GoldenEye().get('/applications/%s/contact/ec' % application.\n external_id)\n except Exception as e:\n logging.info('request ec-member error: %s' % str(e))\n try:\n if not ecs.ok:\n ec = []\n logging.info('get application %s ec-member info error',\n application.external_id)\n else:\n ec = ecs.json()['data']\n if ec:\n for e in ec:\n number = number_strip(e['numbers'])[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=e[\n 'name'][:128], number=number, relationship=\n Relationship.FAMILY.value, source=FamilyContactType\n .CONTACTEC.value, real_relationship=Relationship.\n FAMILY.value)\n key = (user_mobile_no, number, ContactType.F_CONTACT_EC\n .value)\n mon_insert_contact[key] = 1, 0, e['name'][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add ec_member error:%s' % str(e))\n try:\n mn = GoldenEye().get('/applications/%s/contact/my_number' %\n application.external_id)\n except Exception as e:\n logging.info('request my_number error: %s' % str(e))\n try:\n if not mn.ok:\n my = []\n logging.info('get application %s my_number info error',\n application.external_id)\n else:\n my = mn.json()['data']\n if my:\n for m in my:\n number = number_strip(m)[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=my[m][\n :128], number=number, relationship=Relationship.\n SUGGESTED.value, source='my number',\n real_relationship=Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_MY_NUMBER.value\n mon_insert_contact[key] = 1, 0, my[m][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add my_member error:%s' % str(e))\n try:\n cn = GoldenEye().get('/applications/%s/contact/company-number' %\n application.external_id)\n except Exception as e:\n logging.info('request company-number error: %s' % str(e))\n try:\n if not cn.ok:\n cn = []\n logging.info('get application %s company_number info error',\n application.external_id)\n else:\n cn = cn.json()['data']\n if cn:\n for c in cn:\n number = c\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=cn[c][\n :128], number=number, relationship=Relationship.\n COMPANY.value, source='company', real_relationship=\n Relationship.COMPANY.value)\n key = user_mobile_no, number, ContactType.C_COMPANY.value\n mon_insert_contact[key] = 1, 0, cn[c][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add company_member error:%s' % 
str(e))\n try:\n ol = AccountService().other_login_contact(userId=application.user_id)\n except Exception as e:\n logging.error('request other_login error: %s' % e)\n ol = {}\n try:\n for o in ol:\n number = number_strip(o)\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=ol[o][:128\n ], number=number, relationship=Relationship.SUGGESTED.\n value, source='other_login', real_relationship=\n Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value\n mon_insert_contact[key] = 1, 0, ol[o][:128]\n except Exception as e:\n logging.error('add other_login number error:%s' % e)\n logging.info('add contact for application %s finished', application.id)\n if mon_insert_contact or mon_update_contact:\n send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {\n 'user_mobile_no': user_mobile_no, 'insert_contact': str(\n mon_insert_contact), 'update_contact': str(mon_update_contact),\n 'user_id': application.user_id, 'name': application.user_name})\n\n\n<mask token>\n\n\ndef get_contact_from_mongo(number):\n if not number:\n return []\n query = TotalContact.objects(src_number=number, source__in=TotalContact\n .available()).order_by('source')\n lst = []\n for c in query:\n relation = TotalContact.relationship(c.source)\n if relation == -1:\n continue\n source = TotalContact.str_source(c.source)\n if not source:\n continue\n lst.append({'related_number': c.dest_number, 'source': source,\n 'is_calc': c.is_calc, 'total_count': c.total_count,\n 'total_duration': c.total_duration, 'relation': relation,\n 'name': c.dest_name})\n return lst\n\n\n<mask token>\n\n\ndef check_key_not_none(payload, keys):\n for key in keys:\n if payload.get(key) is None:\n logging.error('Missing args {}'.format(str(key)))\n return False\n return True\n\n\n@action(MessageAction.BILL_PAID)\ndef bill_paid(payload, msg_id):\n validate = check_key_not_none(payload, ['external_id', 'late_fee_part',\n 'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])\n if not validate:\n logging.error('payload key not fully pass in.')\n return\n external_id = payload['external_id']\n late_fee_part = Decimal(payload['late_fee_part'])\n principal_part = Decimal(payload['principal_part'])\n paid_at = payload['paid_at']\n partner_bill_id = payload['partner_bill_id']\n logging.debug(\n 'application %s paid principal part %s, paid late fee part %s',\n external_id, principal_part, late_fee_part)\n application = Application.filter(Application.external_id == external_id\n ).order_by(-Application.created_at).first()\n if not application:\n logging.info('application %s paid, not found application', external_id)\n return\n sub_bill_id = payload['bill_sub_id']\n overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==\n application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()\n if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not\n overdue_bill):\n logging.info('bill sub not in bomber %s', sub_bill_id)\n return\n with db.atomic():\n repay_at = str_no_utc_datetime(payload['latest_repay_at'])\n Application.update(repay_at=repay_at).where(Application.id ==\n application.id).execute()\n RepaymentLog.create(application=application.id, is_bombed=True,\n current_bomber=application.latest_bomber_id, cycle=application.\n cycle, principal_part=principal_part, late_fee_part=\n late_fee_part, repay_at=paid_at, ptp_bomber=application.\n ptp_bomber, latest_call=application.latest_call, periods=\n overdue_bill.periods if 
overdue_bill else None, overdue_bill_id\n =overdue_bill.id if overdue_bill else None, partner_bill_id=\n partner_bill_id)\n phone_status = PhoneStatus.CONNECTED.value\n real_relationship = RealRelationship.user_values()\n commit = CallActionCommit.NO.value\n number = CallActions.select(CallActions.number).where(CallActions.\n phone_status == phone_status, CallActions.real_relationship <<\n real_relationship, CallActions.commit == commit, CallActions.\n application == application.id).order_by(-CallActions.created_at\n ).first()\n if number:\n Contact.update(call_priority=PriorityStatus.REPAY.value).where(\n Contact.user_id == application.user_id, Contact.\n call_priority == PriorityStatus.LAST.value).execute()\n Contact.update(call_priority=PriorityStatus.LAST.value).where(\n Contact.user_id == application.user_id, Contact.number ==\n number.number).execute()\n if not application.latest_bomber_id:\n return\n Inbox.create(title='application %s,sub_bill_id %s repaid' % (\n application.external_id, sub_bill_id), content=\n 'application %s,sub_bill_id %s repaid' % (application.\n external_id, sub_bill_id), receiver=application.\n latest_bomber_id or application.last_bomber_id, category=\n InboxCategory.REPAID.value)\n\n\n@action(MessageAction.BILL_RELIEF)\ndef bill_relief(payload, msg_id):\n \"\"\"已废弃\"\"\"\n bill = payload['head_bill']\n repay_at = str_no_utc_datetime(bill['latest_repay_at'])\n updated_row = Application.update(repay_at=repay_at).where(Application.\n id == bill['external_id']).execute()\n logging.info('application %s bill relief done', bill['external_id'])\n return updated_row\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)\ndef calc_overdue_days_over(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query = Application.update(overdue_days=overdue_days).where(Application\n .status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.\n overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)\n updated_rows_count = query.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count)\n try:\n calc_overdue_days_over_instalment()\n except Exception as e:\n logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))\n apps = Application.filter(Application.status << [ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value], Application.overdue_days > 95, \n Application.promised_date.is_null(True) | (fn.DATE(Application.\n promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {\n 'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n\n\ndef calc_overdue_days_over_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n for status in sub_bill_status_list:\n query = OverdueBill.update(overdue_days=overdue_days).where(\n OverdueBill.status == status, OverdueBill.overdue_days > 95)\n 
updated_rows_count = query.execute()\n logging.info(\n 'calc_overdue_days_over_instalment done,count:%s,status:%s' % (\n updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.collection_id,\n OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on\n =OverdueBill.collection_id == Application.id).where(Application\n .status == status, Application.type == ApplicationType.\n CASH_LOAN_STAGING.value)\n app_update = {}\n for ob in overdue_bills:\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)\ndef calc_overdue_days(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query_unclaimed = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.UNCLAIMED.value, \n Application.overdue_days <= 95, Application.type == ApplicationType\n .CASH_LOAN.value)\n updated_rows_count_unclaimed = query_unclaimed.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_unclaimed)\n query_processing = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.PROCESSING.value, \n Application.overdue_days <= 95, Application.type == ApplicationType\n .CASH_LOAN.value)\n updated_rows_count_processing = query_processing.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_processing)\n query_test = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.AB_TEST.value, Application.\n overdue_days <= 95, Application.type == ApplicationType.CASH_LOAN.value\n )\n updated_rows_count_test = query_test.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_test)\n calc_overdue_days_instalment()\n apps = Application.select(Application.id).where(Application.status << [\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value, ApplicationStatus.AB_TEST.value], Application.overdue_days <=\n 95, Application.promised_date.is_null(True) | (fn.DATE(Application.\n promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {\n 'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n Application.update(C1A_entry=datetime.now()).where(Application.status <<\n [ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value, ApplicationStatus.AB_TEST.value], Application.overdue_days == 4\n ).execute()\n\n\ndef calc_overdue_days_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n today_now_time = datetime.now()\n month_first_day = today_now_time.replace(day=1, hour=1, minute=30,\n second=0, 
microsecond=0)\n for status in sub_bill_status_list:\n query = OverdueBill.update(overdue_days=overdue_days).where(\n OverdueBill.status == status, OverdueBill.overdue_days <= 95)\n updated_rows_count = query.execute()\n logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %\n (updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.\n created_at, OverdueBill.collection_id, OverdueBill.overdue_days\n ).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.\n collection_id == Application.id).where(Application.status ==\n status, Application.type == ApplicationType.CASH_LOAN_STAGING.value\n )\n app_update = {}\n for ob in overdue_bills:\n if (ob.status == ApplicationStatus.REPAID.value and ob.\n created_at < month_first_day):\n continue\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)\ndef automatic_escalation(payload, msg_id):\n app_ids = payload.get('application_list', [])\n if not app_ids:\n return\n apps = Application.select().where(Application.id.in_(app_ids), \n Application.status != ApplicationStatus.REPAID.value)\n for a in apps:\n new_cycle = application_entry_different_calculations(a)\n if a.overdue_days < 90:\n logging.info(\n 'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'\n .format(a.id, new_cycle, a.cycle, a.overdue_days))\n if new_cycle > a.cycle:\n with db.atomic():\n if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle\n .C1B.value):\n bomber_id = (a.latest_bomber_id if a.latest_bomber_id else\n a.cycle)\n DispatchAppHistory.update(out_at=datetime.now(),\n out_overdue_days=a.overdue_days).where(\n DispatchAppHistory.application == a.id, \n DispatchAppHistory.bomber_id == bomber_id).execute()\n Escalation.create(application=a.id, type=EscalationType.\n AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,\n current_cycle=a.cycle, escalate_to=new_cycle,\n current_bomber_id=a.latest_bomber)\n dis_app_update = DispatchApp.update(status=DisAppStatus.\n ABNORMAL.value).where(DispatchApp.application == a.id)\n dis_app_update.execute()\n a.cycle = new_cycle\n a.last_bomber = a.latest_bomber\n a.status = ApplicationStatus.UNCLAIMED.value\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n if new_cycle == Cycle.C1B.value:\n a.C1B_entry = datetime.now()\n elif new_cycle == Cycle.C2.value:\n a.C2_entry = datetime.now()\n elif new_cycle == Cycle.C3.value:\n a.C3_entry = datetime.now()\n a.save()\n logging.info('automatic escalation done')\n\n\ndef application_entry_different_calculations(app):\n conf = {(1): [1, 10], (2): [11, 30], (3): [31, 60], (4): [61, 90], (5):\n [91, 999999]}\n for new_cycle, scopes in conf.items():\n if scopes[0] <= app.overdue_days <= scopes[1]:\n return new_cycle\n return app.cycle\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY)\ndef cron_summary(payload, msg_id):\n \"\"\"已废弃\"\"\"\n employees = Bomber.select(Bomber, Role).join(Role)\n summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,\n 'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,\n 'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':\n 0, 
'sms_sent': 0} for i in employees}\n now_date = date.today()\n cal_date = now_date - timedelta(days=1)\n claimed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('claimed')).where(fn.DATE(Application.\n claimed_at) == cal_date, Application.status << [ApplicationStatus.\n PROCESSING.value, ApplicationStatus.REPAID.value], Application.\n latest_bomber.is_null(False)).group_by(Application.latest_bomber)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False)).group_by(\n Application.latest_bomber)\n completed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('completed')).where(Application.\n latest_bombing_time.is_null(False), fn.DATE(Application.\n latest_bombing_time) == cal_date, Application.latest_bomber.is_null\n (False)).group_by(Application.latest_bomber)\n escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(\n Escalation.id).alias('escalated')).where(fn.DATE(Escalation.\n created_at) == cal_date, Escalation.type == EscalationType.\n AUTOMATIC.value, Escalation.current_bomber.is_null(False), \n Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation\n .current_bomber)\n transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).\n alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==\n cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(\n Transfer.operator)\n promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(\n BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.\n created_at) == cal_date, BombingHistory.result == BombingResult.\n HAS_PROGRESS.value).group_by(BombingHistory.bomber)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,\n RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.\n current_bomber)\n calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id\n ).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==\n cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)\n calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.\n record_id).alias('calls_connected')).where(fn.DATE(CallLog.\n time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==\n '1').group_by(CallLog.user_id)\n sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(\n ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_\n (ConnectType.sms()), ConnectHistory.created_at >= cal_date, \n ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)\n for i in claimed:\n summary[i.latest_bomber_id]['claimed'] += i.claimed\n for i in completed:\n summary[i.latest_bomber_id]['completed'] += i.completed\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n for i in escalated:\n summary[i.current_bomber_id]['escalated'] += i.escalated\n for i in transferred:\n summary[i.operator_id]['transferred'] += i.transferred\n for i in promised:\n summary[i.bomber_id]['promised'] += i.promised\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n 
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in calls_made:\n summary[int(i.user_id)]['calls_made'] += i.calls_made\n for i in calls_connected:\n summary[int(i.user_id)]['calls_connected'] += i.calls_connected\n for i in sms_sent:\n summary[i.operator_id]['sms_sent'] += i.sms_sent\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'claimed': data['claimed'], 'completed': data['completed'],\n 'cleared': data['cleared'], 'escalated': data['escalated'],\n 'transferred': data['transferred'], 'promised': data['promised'\n ], 'amount_recovered': data['amount_recovered'], 'calls_made':\n data['calls_made'], 'calls_connected': data['calls_connected'],\n 'sms_sent': data['sms_sent'], 'date': cal_date})\n if insert_args:\n Summary.insert_many(insert_args).execute()\n cycle_args = []\n escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(\n Escalation.id).alias('escalated_in')).where(Escalation.status ==\n ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==\n cal_date).group_by(Escalation.escalate_to)\n for i in escalated_in:\n cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.\n escalated_in, 'date': cal_date})\n amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM\n (RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)\n for i in amount_recovered_total:\n amount_recovered_total = i.principal_part + i.late_fee_part\n cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':\n amount_recovered_total, 'date': cal_date})\n if cycle_args:\n Summary.insert_many(cycle_args).execute()\n logging.info('cal summary done')\n send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY2)\ndef cron_summary2(payload, msg_id):\n \"\"\"已废弃,定时任务还在执行,具体情况待确定\"\"\"\n cal_date = date.today() - timedelta(days=1)\n employees = Bomber.select(Bomber, Role).join(Role)\n auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,\n AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')\n ).where(fn.DATE(AutoCallActions.created_at) == cal_date)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.\n is_null(False), RepaymentLog.is_bombed == True)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False))\n auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,\n AutoCallActions.result)\n amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)\n cleared = cleared.group_by(Application.latest_bomber)\n summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,\n 'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered': \n 0} for e in employees}\n for a in auto_call_actions:\n summary[a.bomber_id]['answered_calls'] += a.count\n if a.result == AutoCallResult.PTP.value:\n summary[a.bomber_id]['ptp'] += a.count\n if a.result == AutoCallResult.FOLLOW_UP.value:\n 
summary[a.bomber_id]['follow_up'] += a.count\n if a.result == AutoCallResult.NOT_USEFUL.value:\n summary[a.bomber_id]['not_useful'] += a.count\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'answered_calls': data['answered_calls'], 'ptp': data['ptp'],\n 'follow_up': data['follow_up'], 'not_useful': data['not_useful'\n ], 'cleared': data['cleared'], 'amount_recovered': str(data[\n 'amount_recovered']), 'date': cal_date})\n if insert_args:\n Summary2.insert_many(insert_args).execute()\n\n\n@action(MessageAction.BOMBER_SYNC_CONTACTS)\ndef sync_suggested_contacts(payload, msg_id):\n \"\"\" suggested contacts sync \"\"\"\n applications = Application.select(Application.id, Application.user_id\n ).where(Application.status << [ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value])\n logging.debug('start sync contact')\n for a in applications:\n sync_contacts(a)\n logging.info('contact sync finished')\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_DISCOUNT_APPROVED)\ndef bomber_discount_approved(payload, msg_id):\n app_id = payload['id']\n msg_type = payload['msg_type']\n discount_to = payload['discount_to']\n effective_to = payload['effective_to']\n application = Application.filter(Application.id == app_id).first()\n if not application:\n logging.error(\n 'discount approved msg send failed application %s not found',\n app_id)\n return\n template = Template.select(Template.text, Template.app).where(Template.\n type == ConnectType.AUTO_SMS.value, Template.id << Template.\n get_auto_sms_tpl(msg_type), Template.app == application.app).first()\n if not template:\n logging.error('discount approved msg send failed template %s not found'\n , msg_type)\n return\n promised_date = None\n if application.promised_date:\n promised_date = application.promised_date.strftime('%d-%m-%Y')\n tpl_data = {'user_name': application.user_name, 'due_days': application\n .overdue_days, 'app_name': application.app, 'phone': application.\n user_mobile_no, 'cs_number': cs_number_conf.get(application.app,\n '02150202889'), 'promised_date': promised_date, 'discount_to':\n discount_to, 'effective_to': effective_to}\n content = template.text.format(**tpl_data)\n data_list = [{'receiver': '62' + application.user_mobile_no, 'content':\n content, 'title': ''}]\n send_sms(data_list, msg_type, application.app)\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST)\ndef bomber_auto_call_list(payload, msg_id):\n with db.atomic():\n bomber_dispatch_app()\n dispatch_instalment_app()\n dis_apps = DispatchApp.select(DispatchApp.application).where(\n DispatchApp.status == DisAppStatus.NORMAL.value)\n c1_apps = Application.select(Application.id, Application.cycle,\n Application.follow_up_date, Application.called_times).where(Application\n .status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.\n AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.\n is_rejected == False, Application.promised_date.is_null(True) | (fn\n .DATE(Application.promised_date) < datetime.today().date())).order_by(\n Application.overdue_days, Application.apply_at)\n dis_apps_ids = [da.application_id for da in dis_apps]\n insert_args = []\n for a in c1_apps:\n if a.id in dis_apps_ids:\n continue\n 
insert_args.append({'application': a.id, 'cycle': a.cycle,\n 'follow_up_date': a.follow_up_date, 'called_times': 1 if a.\n called_times else 0, 'description': 'init'})\n if not insert_args:\n logging.error('no application need auto call')\n with db.atomic():\n AutoCallList.delete().execute()\n for idx in range(0, len(insert_args), 100):\n AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()\n for idx in range(0, len(insert_args), 100):\n application_list = [i['application'] for i in insert_args[idx:idx +\n 100]]\n send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {\n 'application_list': application_list})\n logging.info('bomber generate auto call list finished')\n send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})\n\n\nclass ChangeBomberTool(object):\n\n @staticmethod\n def in_record(bomber_id, ids, bd):\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.\n alias('application_id'), R(str(bomber_id)).alias('bomber_id'),\n fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(\n 'expected_out_time'), Application.overdue_days.alias(\n 'entry_overdue_days')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << ids)\n Application.update(latest_bomber=bomber_id).where(Application.id.\n in_(ids)).execute()\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bd),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n @staticmethod\n def out_record(a, bd):\n _id = str(a.id)\n DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a\n .overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]\n .get('principal_paid')), out_late_fee_pending=bd[_id].get(\n 'late_fee') - bd[_id].get('late_fee_paid')).where(\n DispatchAppHistory.application == a.id, DispatchAppHistory.\n bomber_id == a.latest_bomber_id).execute()\n a.last_bomber = a.latest_bomber\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n a.save()\n\n @staticmethod\n def classify(l, b):\n if len(l) == 1:\n return l[0]\n _l = filter(lambda x: x['bomber'] != b, l)\n return min(_l, key=lambda x: len(x['ids']))\n\n\n<mask token>\n\n\ndef bomber_dispatch_app():\n try:\n c1a_dispatch_app()\n except Exception as e:\n logging.error('c1a_dispatch_app error:%s' % str(e))\n cycle = {(1): 10, (2): 30, (3): 60, (4): 90}\n apps = Application.select().where(fn.DATE(Application.C2_entry) == date\n .today(), Application.type == ApplicationType.CASH_LOAN.value)\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C2.value)\n apps_ids = [a.id for a in apps]\n dispatch_inserts = []\n start_index = 0\n apps_length = len(apps_ids)\n logging.warning('apps length %s' % str(apps_length))\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)\n gen = CycleIter([b.id for b in bombers])\n existing_list = []\n end_index = start_index + int(apps_length * p.app_percentage)\n logging.info('partner length %s' % str(end_index))\n if not apps_ids[start_index:end_index]:\n continue\n bills = BillService().bill_list(application_ids=apps_ids[\n start_index:end_index])\n bill_dict = {bill['application_id']: bill for bill in 
bills}\n for a_id in apps_ids[start_index:end_index]:\n bomber = average_gen(gen, existing_list)\n q = DispatchApp.delete().where(DispatchApp.application == a_id\n ).execute()\n dispatch_inserts.append({'application': a_id, 'bomber': bomber,\n 'partner': p.id})\n application = Application.select().where(Application.id == a_id\n ).first()\n application.latest_bomber = bomber\n application.status = ApplicationStatus.AB_TEST.value\n application.ptp_bomber = None\n application.save()\n day_next_cycle = cycle.get(application.cycle\n ) - application.overdue_days\n DispatchAppHistory.create(application=a_id, partner_id=p.id,\n bomber_id=bomber, entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - Decimal(\n bill_dict[a_id].get('principal_paid')),\n entry_late_fee_pending=Decimal(bill_dict[a_id].get(\n 'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')\n ), expected_out_time=date.today() + timedelta(days=\n day_next_cycle))\n start_index = end_index\n with db.atomic():\n for idx in range(0, len(dispatch_inserts), 100):\n DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n config = SystemConfig.prefetch(SCI.AB_TEST_C2)\n c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)\n c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)\n c2 = apps_ids[start_index:]\n if c2:\n bills = BillService().bill_list(application_ids=c2)\n else:\n bills = []\n bill_dict = {bill['application_id']: bill for bill in bills}\n logging.info('c2 AB_test length: %s' % str(c2))\n gen = CycleIter(c2_bomber)\n existing_list = []\n for c in c2:\n bomber = average_gen(gen, existing_list)\n application = Application.filter(Application.id == c).first()\n application.status = ApplicationStatus.AB_TEST.value\n application.latest_bomber = bomber\n application.ptp_bomber = None\n application.save()\n day_next_cycle = 46 - application.overdue_days\n DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at\n =datetime.now(), entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - bill_dict[c].get(\n 'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(\n 'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),\n expected_out_time=date.today() + timedelta(days=day_next_cycle))\n ab_test_other()\n\n\ndef c1a_dispatch_app():\n today = datetime.today().date()\n tomorrow = today + timedelta(days=1)\n c1a_apps = Application.select().where(Application.status << [\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value], Application.dpd1_entry >= today, Application.dpd1_entry <\n tomorrow, Application.type == ApplicationType.CASH_LOAN.value)\n all_aids = [a.id for a in c1a_apps]\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C1A.value)\n end = 0\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n is_del == 0)\n start = end\n end += int(len(all_aids) * p.app_percentage)\n aids = all_aids[start:end]\n bids = [b.id for b in bombers]\n if not bids or not aids:\n continue\n average_number = get_average_number(len(aids), len(bids))\n p_end = 0\n for i, bid in enumerate(bids):\n p_start = p_end\n p_end += average_number[i]\n b_aids = aids[p_start:p_end]\n with db.atomic():\n q = Application.update(latest_bomber=bid, status=\n ApplicationStatus.AB_TEST.value).where(Application.id <<\n b_aids).execute()\n params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,\n 
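# Assumption: new_in_record writes the DispatchAppHistory entry rows for\n
                    # this batch, analogous to in_record defined later in this module.\n
                    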
'application_ids': b_aids, 'dest_bomber_id': bid}\n new_in_record(**params)\n try:\n dispatch_inserts = []\n for aid in b_aids:\n dispatch_inserts.append({'application': aid, 'bomber':\n bid, 'partner': p.id, 'status': DisAppStatus.NORMAL\n .value})\n if dispatch_inserts:\n q = DispatchApp.insert_many(dispatch_inserts).execute()\n except Exception as e:\n logging.error('c1a分件写入dispatch_app error:%s' % str(e))\n\n\n<mask token>\n\n\ndef get_cash_bomber(bids, cycle):\n cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==\n 0, Bomber.instalment != cycle)\n cash_bids = [b.id for b in cash_bombers]\n return cash_bids\n\n\n<mask token>\n\n\ndef out_and_in_record_instalment(**kwargs):\n if not kwargs.get('application_ids'):\n return\n out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(\n DispatchAppHistory.application << kwargs['application_ids'],\n DispatchAppHistory.out_at.is_null(True)).execute()\n cycle_period = {(1): '10', (2): '30', (3): '60', (4): '90'}\n period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')\n kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(kwargs['dest_bomber_id'])).alias(\n 'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.\n alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).\n alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << kwargs[\n 'application_ids'])\n application_list = list(subquery)\n for idx in range(0, len(application_list), 50):\n applications = application_list[idx:idx + 50]\n app_ids = [i.application_id for i in applications]\n overdue_bills = OverdueBill.select().where(OverdueBill.\n collection_id << app_ids)\n sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]\n bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)\n insert_args = lambad_instalment_result(bill_list, applications)\n if not insert_args:\n continue\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef c1b_dispatch_in_record(**kwargs):\n app_ids = kwargs.get('apps')\n partner_id = kwargs.get('partner_id', 'null')\n bill_dict = kwargs.get('bill_dict')\n period = kwargs.get('period')\n bomber_id = kwargs.get('bomber_id')\n if not all([app_ids, partner_id, bill_dict, period]):\n return False\n bill_dict = {str(k): v for k, v in bill_dict.items()}\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().\n alias('entry_at'), Application.overdue_days.alias(\n 'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.id << app_ids)\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_CONTACT)\ndef bomber_auto_call_contact(payload, msg_id):\n application_list = payload['application_list']\n applications = []\n for 
app_id in application_list:\n applications.append(Application.filter(Application.id == app_id).\n first())\n with db.atomic():\n for application in applications:\n cycle = application.cycle\n contacts = Contact.select().where(Contact.user_id ==\n application.user_id, Contact.latest_status.not_in(\n ContactStatus.no_use())).order_by(-Contact.useful, Contact.\n relationship, -Contact.total_duration, -Contact.total_count)\n level1 = []\n level2 = []\n level3 = []\n level = []\n for c in contacts:\n if c.relationship == Relationship.APPLICANT.value:\n level.append(c)\n elif c.relationship == Relationship.FAMILY.value:\n level1.append(c)\n elif c.relationship == Relationship.COMPANY.value:\n level2.append(c)\n elif c.relationship == Relationship.SUGGESTED.value:\n level3.append(c)\n contacts = level + level2 + level1 + level3\n numbers = []\n fc_count = 0\n app_calls = []\n need_verify = False\n for eac_contact in contacts:\n if (eac_contact.relationship == Relationship.FAMILY.value and\n eac_contact.useful == ContactsUseful.NONE.value):\n need_verify = True\n break\n if need_verify:\n logging.info('Found contact need update. app id {}'.format(\n str(application.id)))\n app_calls = AuditService().phone_invalid(cat=Relationship(1\n ).name, application_id=application.external_id)\n call_history = True\n c1b_family_dict = defaultdict(list)\n for c in contacts:\n if c.relationship == Relationship.COMPANY.value:\n if cycle == Cycle.C1A.value:\n call_history = check_call_history(application)\n break\n if cycle == Cycle.C1B.value:\n if (c.source != CompanyContactType.\n BASIC_INFO_JOB_TEL.value):\n continue\n if c.relationship == Relationship.FAMILY.value:\n if cycle == Cycle.C1A.value:\n call_history = check_call_history(application)\n break\n if c.useful == ContactsUseful.NONE.value:\n c.useful = check_valid_phone(app_calls, c)\n c.save()\n if c.useful == ContactsUseful.INVALID.value:\n logging.info('Found invalid contact. 
{}'.format(str(c.id)))\n
                        continue\n
                    if cycle == Cycle.C1B.value:\n
                        c1b_family_dict[c.source].append(c.number)\n
                        continue\n
                if c.relationship == Relationship.SUGGESTED.value:\n
                    if cycle not in (Cycle.C2.value, Cycle.C3.value):\n
                        break\n
                    if cycle == Cycle.C2.value and fc_count > 10:\n
                        break\n
                    if cycle == Cycle.C3.value and fc_count > 20:\n
                        break\n
                    fc_count += 1\n
                numbers.append(c.number)\n
            if len(numbers) == 0 or not call_history:\n
                # peewee uses << for SQL IN; a bare Python 'in' would not\n
                # filter by source in the generated query.\n
                src_contact = Contact.select().where(\n
                    Contact.user_id == application.user_id,\n
                    Contact.source << FamilyContactType.c1a_order())\n
                c1a_family_dict = defaultdict(list)\n
                for e in src_contact:\n
                    c1a_family_dict[e.source].append(e.number)\n
                for call_type in FamilyContactType.c1a_order():\n
                    numbers.extend(c1a_family_dict[call_type])\n
            if cycle == Cycle.C1B.value:\n
                for call_type in FamilyContactType.c1b_order():\n
                    numbers.extend(c1b_family_dict[call_type])\n
            numbers = list(set(numbers))\n
            update_query = AutoCallList.update(numbers=','.join(numbers)\n
                ).where(AutoCallList.application == application.id)\n
            update_query.execute()\n\n\n
<mask token>\n\n\n
def check_call_history(application):\n
    app_create_at = application.created_at + timedelta(days=4)\n
    if datetime.today().date() > app_create_at.date():\n
        call_actions = CallActions.select().where(CallActions.type == 0,\n
            CallActions.application == application.id,\n
            CallActions.created_at > datetime.now() - timedelta(days=5))\n
        for call in call_actions:\n
            if call.phone_status == PhoneStatus.CONNECTED.value:\n
                return True\n
        return False\n
    return True\n\n\n
@action(MessageAction.BOMBER_SCAVENGER)\n
def scavenger(payload, msg_id):\n
    scavenger_time = -60\n
    scavenger = SystemConfig.select().where(\n
        SystemConfig.key == 'SCAVENGER_TIME').first()\n
    if scavenger and scavenger.value.isdigit():\n
        scavenger_time = -int(scavenger.value)\n
    update_auto_call_list = AutoCallList.update(\n
        status=AutoListStatus.PENDING.value, description='scavenger').where(\n
        AutoCallList.status == AutoListStatus.PROCESSING.value,\n
        AutoCallList.updated_at < datetime.now() + timedelta(minutes=scavenger_time))\n
    count = update_auto_call_list.execute()\n
    logging.info('scavenger processed %s applications', count)\n
    mail_box_scavenger_time = -30\n
    mail_box_scavenger = SystemConfig.select().where(\n
        SystemConfig.key == 'MAIL_BOX_SCAVENGER_TIME').first()\n
    if mail_box_scavenger and mail_box_scavenger.value.isdigit():\n
        mail_box_scavenger_time = -int(mail_box_scavenger.value)\n
    update_mail_box_call_list = AutoCallList.update(\n
        status=AutoListStatus.PENDING.value).where(\n
        AutoCallList.status == AutoListStatus.MAILBOX.value,\n
        AutoCallList.updated_at < datetime.now() + timedelta(\n
        minutes=mail_box_scavenger_time))\n
    mail_box_count = update_mail_box_call_list.execute()\n
    logging.info('scavenger update mail box %s', mail_box_count)\n
    update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value\n
        ).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value,\n
        AutoIVR.updated_at < datetime.now() + timedelta(minutes=-30))\n
    ivr_result = update_auto_ivr.execute()\n
    logging.info('scavenger update %s ivr' % ivr_result)\n\n\n
<mask token>\n\n\n
@action(MessageAction.REPORT_BOMBER_COLLECTION)\n
def report_bomber_collection(payload, msg_id):\n
    start_date = ReportCollection.select(\n
        fn.MAX(ReportCollection.apply_date)).scalar()\n
    now = datetime.now()\n
    if start_date and str(start_date) == str(now)[:10]:\n
        return\n
    end_date = str(now + timedelta(days=1))[:10]\n
    start_date = str(now)[:10]\n
    dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))\n
    
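# The six read-only queries below fill d1..d6 as OperatedDict values; the\n
    # c1..c11 figures derived from them are the per-cycle metrics written to\n
    # ReportCollection at the end of this function.\n
    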
all_overdue_loan_sql1 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n GROUP BY 1;\n \"\"\"\n s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()\n d1 = OperatedDict(s_data1)\n all_overdue_loan_sql2 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE DATE(ba.follow_up_date) > CURDATE()\n AND ba.called_counts = 0\n GROUP BY 1;\n \"\"\"\n s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()\n d2 = OperatedDict(s_data2)\n overdue_loans_entered_into_predict_call_system_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE ba.called_counts >= 1\n GROUP BY 1;\n \"\"\"\n s_data3 = readonly_db.execute_sql(\n overdue_loans_entered_into_predict_call_system_sql).fetchall()\n d3 = OperatedDict(s_data3)\n loans_completed_sql = \"\"\"\n SELECT ba.cycle, COUNT(DISTINCT ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()\n d4 = OperatedDict(s_data4)\n connected_calls_automatic_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()\n d5 = OperatedDict(s_data5)\n connected_calls_manual_sql = \"\"\"\n SELECT bb.cycle, COUNT(bb.id)\n FROM bomber.bombing_history bb\n WHERE DATE(bb.created_at) = curdate()\n AND (bb.bomber_id < 150 OR bb.bomber_id > 200)\n GROUP BY bb.cycle;\n \"\"\"\n s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()\n d6 = OperatedDict(s_data6)\n logging.info('Directly get data from database successfully.')\n c1 = d1 - d2\n c2 = d3\n c3 = c2 / c1\n c4 = d4\n c5 = c4 / c2\n c6 = d5\n c7 = c6 / c4\n c8 = d6\n c9 = OperatedDict(get_agent())\n c10 = (c6 + c8) / c9\n try:\n c11 = average_call_duration_team(start_date, end_date)\n except AttributeError:\n c11 = {}\n lst = []\n for i in range(1, 5):\n lst.append({'apply_date': start_date, 'cycle': dct[i],\n 'all_overdue_loan': c1.get(i, 0),\n 'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),\n 'of_overdue_loans_entered_into_predict_call_system': round(c3.\n get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),\n 'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *\n 100, 1), 'connected_calls_automatic': c6.get(i, 0),\n 'connected_calls_automatic_completed_loans': round(c7.get(i, 0),\n 1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i, \n 0), 'average_calls_agent': round(c10.get(i, 0), 1),\n 'average_call_duration_team': round(c11.get(i, 0), 1)})\n ReportCollection.insert_many(lst).execute()\n logging.info('report_bomber_collection:Done!')\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)\ndef bomber_auto_call_list_record(payload, msg_id):\n \"\"\"记录一年的auto_call_list,删除前一天的数据,增加今天的数据\"\"\"\n now = datetime.now()\n if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):\n date_sql = \"\"\"\n SELECT DATE(created_at) FROM auto_call_list_record\n GROUP BY DATE(created_at) limit 1\n \"\"\"\n del_date = db.execute_sql(date_sql).fetchone()[0]\n del_sql = \"\"\"\n DELETE FROM auto_call_list_record WHERE date(created_at) = %s\n \"\"\"\n db.execute_sql(del_sql, [del_date])\n sql = \"\"\"\n INSERT INTO auto_call_list_record\n SELECT * FROM auto_call_list\n \"\"\"\n db.execute_sql(sql)\n logging.info('bomber_auto_call_list_record done')\n\n\n<mask token>\n\n\ndef 
new_out_record(**kwargs):\n if not kwargs['application_ids']:\n return\n DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.\n bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.\n application << kwargs['application_ids'], DispatchAppHistory.out_at\n .is_null(True)).execute()\n if kwargs.get('month_dispatch'):\n return\n try:\n Application.update(ptp_bomber=None).where(Application.id << kwargs[\n 'application_ids']).execute()\n except Exception as e:\n logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[\n 'application_ids'], str(e)))\n\n\n<mask token>\n\n\ndef end_old_application(old_app, paid=False):\n if paid:\n if old_app.status == OldLoanStatus.WAITING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return\n if old_app.status == OldLoanStatus.PROCESSING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return old_app.application_id\n end_date = old_app.end_date\n now = datetime.now()\n if now >= max(end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n old_app.save()\n return old_app.application_id\n\n\n<mask token>\n\n\ndef in_record(**kwargs):\n \"\"\"\n :param kwargs: dist_partner_id, dist_bomber_id,\n expected_out_time, application_ids\n :return:\n \"\"\"\n kwargs['dist_partner_id'] = kwargs.get('dist_partner_id') or 'null'\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(kwargs['dist_bomber_id'])).alias(\n 'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.\n alias('entry_overdue_days'), R(str(kwargs['dist_partner_id'])).\n alias('partner_id'), R('\"{}\"'.format(kwargs['expected_out_time'])).\n alias('expected_out_time')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << kwargs[\n 'application_ids'])\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n app_ids = [i.application_id for i in applications]\n bill_list = BillService().bill_list(application_ids=app_ids)\n bill_dict = {str(bill['application_id']): bill for bill in bill_list}\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef start_old_application(old_app, cancel=False):\n application_id = old_app.application_id\n if cancel and old_app.status == OldLoanStatus.PAID.value:\n now = datetime.now()\n if old_app.start_date is None:\n old_app.status = OldLoanStatus.WAITING.value\n elif now >= max(old_app.end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.\n promised_date or now)).where(DispatchAppHistory.bomber_id ==\n old_app.bomber_id, DispatchAppHistory.application ==\n application_id).execute()\n else:\n old_app.status = OldLoanStatus.PROCESSING.value\n DispatchAppHistory.update(out_at=None).where(DispatchAppHistory\n .bomber_id == old_app.bomber_id, DispatchAppHistory.\n application == application_id).execute()\n old_app.save()\n return\n application = Application.get_or_none(Application.id == application_id,\n Application.status != ApplicationStatus.REPAID.value, Application.\n overdue_days > 90, Application.promised_date.is_null(True) | (fn.\n DATE(Application.promised_date) < datetime.today().date()))\n if not application:\n logging.error('Can not set 
old application %s to start collecting',\n application_id)\n return\n if old_app.status in OldLoanStatus.no_available():\n logging.info('%s has finished or paid', old_app.application_id)\n return\n config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)\n sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)\n old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value\n old_app.status = OldLoanStatus.PROCESSING.value\n old_app.bomber_id = old_app_bomber\n old_app.start_date = datetime.now()\n if not old_app.end_date:\n old_app.end_date = datetime.now() + timedelta(days=sp)\n old_app.save()\n in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,\n application_ids=[old_app.application_id], expected_out_time=str(\n old_app.end_date))\n\n\n<mask token>\n\n\ndef run_member_sql(sql):\n result = [0, 0]\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n sql_result = cursor.fetchone()\n if sql_result:\n result = sql_result\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n return result\n\n\n<mask token>\n\n\ndef get_before_bomber(date_time):\n begin_time = str(date_time - timedelta(days=7))\n end_time = str(date_time)\n old_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at<date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (begin_time, begin_time))\n old_data = run_one_sql(old_sql)\n new_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at> '%s' \n and created_at<'%s'\n and overdue_days=1;\n \"\"\"\n % (begin_time, end_time))\n new_data = run_one_sql(new_sql)\n dpd4_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>date_add('%s',interval 1 day) \n and created_at< date_add('%s',interval 1 day)\n and overdue_days=4;\n \"\"\"\n % (begin_time, end_time))\n dpd4_data = run_one_sql(dpd4_sql)\n dpd2_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at< date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (end_time, end_time))\n dpd2_data = run_one_sql(dpd2_sql)\n all_money = old_data + new_data\n repayment = all_money - dpd4_data - dpd2_data\n pro = 0\n if all_money:\n pro = repayment / all_money * 100\n RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,\n proportion=pro, repayment=repayment)\n\n\n<mask token>\n\n\ndef get_c1a_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=19)\n repayment_sql = (\n \"\"\"\n select \n 
sum(b.principal_part+b.late_fee_part) as paid_amount, \n cdt\n from \n (select \n br.principal_part, br.late_fee_part, \n date(cdt) as cdt, br.repay_at, br.application_id\n from (\n select ba.id, ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n if not repayment:\n return\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n for d in repayment:\n repay = d[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n d[1], RepaymentReportInto.cycle == Cycle.C1A.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = round(repay, 3)\n pro = repay / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_c2_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n not_contain_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_money = run_one_sql(not_contain_sql)\n begin_date = date_time - timedelta(days=37)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n not_contain_repay_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select 
br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_repay = run_all_sql(not_contain_repay_sql)\n if not not_contain_money and repayment:\n return\n for i in ContainOut.values():\n if i == ContainOut.NOT_CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(\n not_contain_money, 3), proportion='0', repayment=0,\n is_first_loan=is_first_loan, contain_out=ContainOut.\n NOT_CONTAIN.value)\n for repay in not_contain_repay:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n elif i == ContainOut.CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(all_money, 3),\n proportion='0', repayment=0, is_first_loan=\n is_first_loan, contain_out=ContainOut.CONTAIN.value)\n for repay in repayment:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\ndef get_c3_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id, ba.c3_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=30)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c3_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c3_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and 
da.is_first_loan = '%s'\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 4\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C3.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n if not repayment:\n return\n for repay in repayment:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n repay[1], RepaymentReportInto.cycle == Cycle.C3.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = repay_money\n pro = 0\n if report.all_money and int(report.all_money):\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_static_bomber(begin_date):\n active_date = begin_date - timedelta(days=8)\n bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),\n BomberR.last_active_at.alias('active')).where(BomberR.\n last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])\n summary = []\n for bomber in bombers:\n summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':\n bomber.role.cycle, 'work_ind': 0})\n SummaryBomber.insert_many(summary).execute()\n\n\n<mask token>\n\n\n@time_logger\ndef get_call_and_made(end_date, begin_date, real_time_query=False):\n call_sql = (\n \"\"\"\n select \n bomber_id, \n count(case when relationship is not null then application_id end) \n as 'call_cnt', \n count(distinct case when relationship is not null then \n application_id end) as 'call_case',\n count(case when phone_status=4 then application_id end) as 'connect',\n count(distinct case when phone_status=4 then application_id end) \n as 'connect_case'\n from (\n select bomber_id,application_id,phone_status, cycle, relationship\n from bomber.call_actions ba\n where created_at>'%s' and created_at<'%s'\n and type in (0, 1)\n ) a\n group by 1\n \"\"\"\n % (begin_date, end_date))\n calls = run_all_sql(call_sql)\n if real_time_query:\n return calls\n for call in calls:\n bomber, call_cnt, case_made, connect_cnt, case_connect = call\n SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,\n call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(\n SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date\n ).execute()\n return calls\n\n\n@time_logger\ndef get_claimed_cnt(end_date, begin_date, real_time_query=False):\n table_date = begin_date - timedelta(days=30)\n claimed_sql = (\n \"\"\"\n SELECT\n COUNT( `t1`.`application_id` ) AS cnt,\n `t1`.`bomber_id` AS bomber_id \n FROM\n `dispatch_app_history` AS t1 \n WHERE\n ( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null ) \n AND ( `t1`.`bomber_id` != 1000 ) \n AND ( `t1`.`partner_id` IS null ) \n AND ( `t1`.`entry_at` > '%s' ) \n AND ( `t1`.`entry_at` < '%s' ) \n GROUP BY\n `t1`.`bomber_id`\n \"\"\"\n % (begin_date, table_date, end_date))\n claimeds = run_all_sql(claimed_sql)\n if real_time_query:\n return claimeds\n for claimed in claimeds:\n cnt, bomber_id = claimed\n SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == bomber_id).execute()\n return claimeds\n\n\ndef get_sms_data(end_data, begin_data):\n all_sms = 
ConnectHistoryR.select(ConnectHistoryR.operator.alias(\n 'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')\n ).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.\n created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())\n ).group_by(ConnectHistoryR.operator)\n for sms in all_sms:\n SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==\n begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()\n return all_sms\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_NEW)\ndef summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_active_bomber(begin_date)\n get_call_and_made(end_date, begin_date)\n get_claimed_cnt(end_date, begin_date)\n get_sms_data(end_date, begin_date)\n get_ptp_data(end_date, begin_date)\n get_recover_amount(end_date, begin_date)\n get_unfollowed(begin_date)\n get_unfollowed_call(begin_date)\n\n\ndef get_new_case_amount(begin_date, end_date):\n all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.\n entry_late_fee_pending + DispatchAppHistoryR.\n entry_principal_pending).alias('pending'), DispatchAppHistoryR.\n bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')\n ).where(DispatchAppHistoryR.entry_at > begin_date, \n DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.\n partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)\n for case in all_case:\n SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt\n =case.cnt).where(SummaryBomber.bomber_id == case.bomber_id, \n SummaryBomber.time == begin_date).execute()\n return all_case\n\n\n<mask token>\n\n\ndef get_kp_today(begin_date, end_date):\n sql = (\n \"\"\"\n select bomber_id, count(distinct application_id)\n from( \n SELECT bomber_id, application_id\n FROM bomber.auto_call_actions a\n WHERE promised_date >= '%s' AND created_at < '%s' \n AND EXISTS(select 1 from bomber.application ba \n where a.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))\n UNION \n SELECT bomber_id, application_id\n FROM bomber.bombing_history b\n WHERE promised_date >= '%s' AND created_at < '%s'\n AND EXISTS(select 1 from bomber.application ba \n where b.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))) result\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, begin_date, begin_date, end_date, begin_date)\n )\n kp_today = run_all_sql(sql)\n for kp in kp_today:\n SummaryBomber.update(KP_today_cnt=kp[1]).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == kp[0]).execute()\n\n\n<mask token>\n\n\ndef get_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN 
bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c2_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c2_entry > '%s' \n AND ba.c2_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c3_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c3_entry > '%s' \n AND ba.c3_entry < '%s' \n ) a\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date, end_date))\n case_cleared_sums = run_all_sql(sql)\n for clear in case_cleared_sums:\n SummaryBomber.update(new_case_cleared_sum=clear[1]).where(\n SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==\n clear[0]).execute()\n\n\n@time_logger\ndef get_new_case_call(begin_date, end_date, real_query_time=False):\n sql = (\n \"\"\"\n SELECT\n bd.bomber_id,\n count( DISTINCT bd.application_id )\n FROM\n bomber.dispatch_app_history bd\n INNER JOIN bomber.call_actions bc \n ON bd.application_id = bc.application_id \n AND bd.bomber_id = bc.bomber_id \n AND date( bd.entry_at ) = date( bc.created_at ) \n WHERE\n entry_at > '%s' \n AND entry_at < '%s' \n AND partner_id IS NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n new_case_calls = run_all_sql(sql)\n if real_query_time:\n return new_case_calls\n for call in new_case_calls:\n SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber\n .bomber_id == call[0], SummaryBomber.time == begin_date).execute()\n return new_case_calls\n\n\n@time_logger\ndef get_calltime_avg(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction,\n count( 1 ) AS auto_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' ' \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n AND an.recording is not null\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]][0] += data[1]\n result[data[0]][1] += data[2]\n continue\n result[data[0]] = [data[1], data[2]]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=\n value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == key).execute()\n return result\n\n\ndef get_no_calltime_avg(begin_date, end_date):\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext 
\n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND (an.status!='ANSWERED' or an.recording is null) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n for data in manuals:\n SummaryBomber.update(calltime_no_case_sum=data[1],\n calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /\n data[2] if data[2] else 0).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == data[0]).execute()\n\n\n@time_logger\ndef get_calltime_sum(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' '\n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == key).execute()\n return result\n\n\n<mask token>\n\n\ndef get_unfollowed_call(begin_date):\n sql = (\n \"\"\"\n SELECT\n bomber_id,\n count(1)\n FROM\n (\n SELECT\n bd.application_id,\n date(bd.entry_at) AS entry_at,\n bd.bomber_id,\n date(bd.out_at) AS out_at\n FROM\n bomber.dispatch_app_history bd\n WHERE\n (\n out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)\n OR out_at IS NULL\n )\n AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)\n AND partner_id IS NULL\n AND NOT EXISTS (\n SELECT\n 1\n FROM\n bomber.call_actions bc\n WHERE\n bd.bomber_id = bc.bomber_id\n AND bc.application_id = bd.application_id\n AND bc.created_at < '%(begin_date)s'\n )\n ) a\n WHERE\n EXISTS (\n SELECT\n 1\n FROM\n bomber.call_actions bc\n WHERE\n a.application_id = bc.application_id\n AND a.bomber_id = bc.bomber_id\n AND bc.created_at > '%(begin_date)s'\n AND bc.created_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n AND bc.created_at >= a.entry_at\n )\n OR EXISTS (\n SELECT\n 1\n FROM\n bomber.application ba\n WHERE\n ba.id = a.application_id\n AND ba.finished_at > '%(begin_date)s'\n AND ba.finished_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n )\n GROUP BY\n 1\n \"\"\"\n % {'begin_date': begin_date})\n data = run_all_sql(sql)\n result = defaultdict(int)\n for d in data:\n result[d[0]] += d[1]\n bomber_list = []\n for key, value in result.items():\n bomber_list.append(key)\n SummaryBomber.update(unfollowed_call_cnt=SummaryBomber.\n new_case_call_cnt + value).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == key).execute()\n update_sql = SummaryBomber.update(unfollowed_call_cnt=SummaryBomber.\n new_case_call_cnt).where(SummaryBomber.time == 
begin_date)\n if bomber_list:\n update_sql = update_sql.where(SummaryBomber.bomber_id.not_in(\n bomber_list))\n update_sql.execute()\n return result\n\n\n@action(MessageAction.UPDATE_SUMMARY_NEW)\ndef update_summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'UPDATE_SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_new_case_amount(begin_date, end_date)\n get_kp_cleared(begin_date, end_date)\n get_kp_today(begin_date, end_date)\n get_ptp_cnt(begin_date, end_date)\n get_ptp_call_cnt(begin_date, end_date)\n get_new_case_cleared(begin_date, end_date)\n get_new_case_call(begin_date, end_date)\n get_calltime_avg(begin_date, end_date)\n get_no_calltime_avg(begin_date, end_date)\n get_calltime_sum(begin_date, end_date)\n\n\ndef get_cycle_claimed(begin_date, end_date):\n sql = (\n \"\"\"\n select cycle,count(1)\n from bomber.application where cycle in (1,2,3,4)\n and (finished_at is null or (finished_at>'%s'))\n and created_at>'2018-09-01'\n group by 1\n \"\"\"\n % begin_date)\n result = run_all_sql(sql)\n return result\n\n\n@time_logger\ndef cycle_new_case(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( ba.id ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.created_at ) = bo.which_day_overdue \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c1b_entry ) = bo.which_day_overdue\n WHERE\n c1b_entry > '%s' \n AND c1b_entry < '%s' \n UNION\n SELECT\n 3 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c2_entry ) = bo.which_day_overdue \n WHERE\n c2_entry > '%s' \n AND c2_entry < '%s' \n UNION\n SELECT\n 4 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c3_entry ) = bo.which_day_overdue\n WHERE\n c3_entry > '%s' \n AND c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n all_datas = run_all_sql(sql)\n if real_time_query:\n return all_datas\n for data in all_datas:\n SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()\n return all_datas\n\n\n@time_logger\ndef get_cycle_new_case_call(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.created_at ) = date( bc.created_at ) \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN 
bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c1b_entry ) = date( bc.created_at ) \n WHERE\n ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s'\n UNION\n SELECT\n 3 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c2_entry ) = date( bc.created_at ) \n WHERE\n ba.c2_entry > '%s' \n AND ba.c2_entry < '%s'\n UNION\n SELECT\n 4 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c3_entry ) = date( bc.created_at ) \n WHERE\n ba.c3_entry > '%s' \n AND ba.c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n cycle_datas = run_all_sql(sql)\n if real_time_query:\n return cycle_datas\n for data in cycle_datas:\n SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber\n .time == begin_date, SummaryBomber.cycle == data[0], \n SummaryBomber.bomber_id == data[0]).execute()\n return cycle_datas\n\n\ndef get_cycle_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n '1' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.created_at ) = date( br.repay_at ) \n WHERE ba.created_at > '%s' \n AND ba.created_at < '%s' \n GROUP BY 1, 2 ) a \n UNION\n SELECT \n '2' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n GROUP BY 1, 2) a\n \"\"\"\n % (begin_date, end_date, begin_date, end_date))\n cycle_cleared = run_all_sql(sql)\n for i in cycle_cleared:\n SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber\n .cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.\n time == begin_date).execute()\n\n\ndef get_cycle_case_made_cnt(begin_date, end_date):\n sql = (\n \"\"\"\n select cycle,count(distinct application) from (\n select distinct cycle,application from bomber.auto_call_list_record\n where created_at >= '%s'\n and created_at < '%s'\n and called_counts <> 0\n and cycle in (1,2,3,4)\n union\n select distinct cycle,application_id from bomber.call_actions\n where created_at >= '%s'\n and created_at < '%s'\n and cycle in (1,2,3,4)\n ) c\n group by 1\n \"\"\"\n % (begin_date, end_date, begin_date, end_date))\n case_made_datas = run_all_sql(sql)\n for case_made_data in case_made_datas:\n SummaryBomber.update(case_made_cnt=case_made_data[1]).where(\n SummaryBomber.time == begin_date, SummaryBomber.cycle ==\n case_made_data[0], SummaryBomber.bomber_id == case_made_data[0]\n ).execute()\n\n\n@action(MessageAction.SUMMARY_NEW_CYCLE)\ndef summary_new_cycle(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW_CYCLE').first()\n if worker_log and worker_log.logs >= 5:\n return\n cycle_datas = SummaryBomber.select(fn.SUM(SummaryBomber.\n new_case_amount_sum).alias('new_case_amount_sum'), 
fn.SUM(\n SummaryBomber.new_case_cleared_sum).alias('new_case_cleared_sum'),\n fn.SUM(SummaryBomber.case_made_cnt).alias('case_made_cnt'), fn.SUM(\n SummaryBomber.case_connect_cnt).alias('case_connect_cnt'), fn.SUM(\n SummaryBomber.promised_cnt).alias('promised_cnt'), fn.SUM(\n SummaryBomber.promised_amount).alias('promised_amount'), fn.SUM(\n SummaryBomber.cleared_cnt).alias('cleared_cnt'), fn.SUM(\n SummaryBomber.cleared_amount).alias('cleared_amount'), fn.SUM(\n SummaryBomber.new_case_cnt).alias('new_case_cnt'), fn.SUM(\n SummaryBomber.new_case_call_cnt).alias('new_case_call_cnt'), fn.SUM\n (SummaryBomber.unfollowed_cnt).alias('unfollowed_cnt'), fn.SUM(\n SummaryBomber.unfollowed_call_cnt).alias('unfollowed_call_cnt'), fn\n .SUM(SummaryBomber.call_cnt).alias('call_cnt'), fn.SUM(\n SummaryBomber.sms_cnt).alias('sms_cnt'), fn.SUM(SummaryBomber.\n call_connect_cnt).alias('call_connect_cnt'), fn.SUM(SummaryBomber.\n ptp_today_cnt).alias('ptp_today_cnt'), fn.SUM(SummaryBomber.\n ptp_today_call_cnt).alias('ptp_today_call_cnt'), fn.SUM(\n SummaryBomber.ptp_next_cnt).alias('ptp_next_cnt'), fn.SUM(\n SummaryBomber.ptp_next_call_cnt).alias('ptp_next_call_cnt'), fn.SUM\n (SummaryBomber.KP_cleared_cnt).alias('KP_cleared_cnt'), fn.SUM(\n SummaryBomber.KP_today_cnt).alias('KP_today_cnt'), fn.SUM(\n SummaryBomber.work_ind).alias('work_ind'), fn.SUM(SummaryBomber.\n calltime_sum).alias('calltime_sum'), fn.SUM(SummaryBomber.\n calltime_case_sum).alias('calltime_case_sum'), fn.SUM(SummaryBomber\n .calltime_case_cnt).alias('calltime_case_cnt'), fn.SUM(\n SummaryBomber.calltime_no_case_sum).alias('calltime_no_case_sum'),\n fn.SUM(SummaryBomber.calltime_no_case_cnt).alias(\n 'calltime_no_case_cnt'), SummaryBomber.cycle.alias('cycle')).where(\n SummaryBomber.time == begin_date, SummaryBomber.cycle << Cycle.values()\n ).group_by(SummaryBomber.cycle)\n for cycle_data in cycle_datas:\n SummaryBomber.create(bomber_id=cycle_data.cycle, time=begin_date,\n cycle=cycle_data.cycle, new_case_amount_sum=cycle_data.\n new_case_amount_sum, new_case_cleared_sum=cycle_data.\n new_case_cleared_sum, new_case_cleard_rate=0, case_made_cnt=\n cycle_data.case_made_cnt, case_made_rate=0, case_connect_cnt=\n cycle_data.case_connect_cnt, case_connect_rate=0, promised_cnt=\n cycle_data.promised_cnt, promised_amount=cycle_data.\n promised_amount, cleared_cnt=cycle_data.cleared_cnt,\n cleared_amount=cycle_data.cleared_amount, new_case_cnt=\n cycle_data.new_case_cnt, new_case_call_cnt=cycle_data.\n new_case_call_cnt, unfollowed_cnt=cycle_data.unfollowed_cnt,\n unfollowed_call_cnt=cycle_data.unfollowed_call_cnt, call_cnt=\n cycle_data.call_cnt, sms_cnt=cycle_data.sms_cnt,\n call_connect_cnt=cycle_data.call_connect_cnt, calltime_case_avg\n =0, ptp_today_cnt=cycle_data.ptp_today_cnt, ptp_today_call_cnt=\n cycle_data.ptp_today_call_cnt, ptp_next_cnt=cycle_data.\n ptp_next_cnt, ptp_next_call_cnt=cycle_data.ptp_next_call_cnt,\n KP_cleared_cnt=cycle_data.KP_cleared_cnt, KP_today_cnt=\n cycle_data.KP_today_cnt, KP_cleared_rate=0, work_ind=cycle_data\n .work_ind, calltime_sum=cycle_data.calltime_sum,\n calltime_case_sum=cycle_data.calltime_case_sum,\n calltime_case_cnt=cycle_data.calltime_case_cnt,\n calltime_no_case_sum=cycle_data.calltime_no_case_sum,\n calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,\n work_time_sum=cycle_data.work_time_sum)\n cycle_claimed = get_cycle_claimed(begin_date, end_date)\n for claimed in cycle_claimed:\n SummaryBomber.update(claimed_cnt=claimed[1]).where(SummaryBomber.\n time == begin_date, 
SummaryBomber.cycle == claimed[0], \n SummaryBomber.bomber_id == claimed[0]).execute()\n cycle_new_case(begin_date, end_date)\n get_cycle_new_case_call(begin_date, end_date)\n get_cycle_new_case_cleared(begin_date, end_date)\n get_cycle_case_made_cnt(begin_date, end_date)\n all_datas = SummaryBomber.filter(SummaryBomber.time == begin_date)\n for data in all_datas:\n cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum if\n data.new_case_amount_sum else 0) * 100\n data.new_case_cleard_rate = cl_rat\n case_made_rate = (data.case_made_cnt / data.claimed_cnt if data.\n claimed_cnt else 0) * 100\n data.case_made_rate = case_made_rate\n case_connect_rate = (data.case_connect_cnt / data.case_made_cnt if\n data.case_made_cnt else 0) * 100\n data.case_connect_rate = case_connect_rate\n calltime_case_avg = (data.calltime_case_sum / data.\n calltime_case_cnt if data.calltime_case_cnt else 0)\n data.calltime_case_avg = calltime_case_avg\n calltime_no_case_avg = (data.calltime_no_case_sum / data.\n calltime_no_case_cnt if data.calltime_no_case_cnt else 0)\n data.calltime_no_case_avg = calltime_no_case_avg\n KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt if data.\n KP_today_cnt else 0) * 100\n data.KP_cleared_rate = KP_cleared_rate\n data.save()\n\n\n<mask token>\n\n\ndef get_change_bomber():\n cycle_role_map = {(5): Cycle.C1B.value, (6): Cycle.C2.value, (8): Cycle\n .C3.value}\n result = {}\n bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,\n BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=\n BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.\n created_at) == date.today(), BomberLog.role_id << list(\n cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.\n instalment == 0).dicts()\n for b_log in bomber_logs:\n cycle = cycle_role_map.get(b_log['role_id'])\n group_id = b_log['group_id']\n if cycle in result:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}\n else:\n result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}}\n if b_log['operation'] == 0:\n result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])\n if result:\n bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map\n .keys())), Bomber.is_del == 0, Bomber.instalment == 0)\n for b in bombers:\n cycle_result = result.get(cycle_role_map[b.role_id], {})\n role_result = cycle_result.get(b.group_id)\n if not role_result:\n continue\n role_result['new_ids'].append(b.id)\n resutl_list = []\n for cycle, group_dict in result.items():\n resutl_list.extend(list(group_dict.values()))\n return resutl_list\n return []\n\n\n<mask token>\n\n\ndef get_average_number(app_nums, bomber_nums):\n average = app_nums // bomber_nums\n remainder = app_nums % bomber_nums\n average_list = [average for i in range(bomber_nums)]\n if remainder == 0:\n return average_list\n for i in range(remainder):\n average_list[i] += 1\n random.shuffle(average_list)\n return average_list\n\n\n<mask token>\n\n\ndef get_surplus_application(new_ids, del_ids, average_nums, classified_apps):\n surplus_apps = []\n for del_id in del_ids:\n del_res = classified_apps.get(del_id, {})\n p_list = del_res.get('p_list', [])\n np_list = del_res.get('np_list', [])\n del_res['need_num'] = -(len(p_list) + len(np_list))\n del_res['to_list'] = np_list + p_list\n surplus_apps.extend(p_list)\n surplus_apps.extend(np_list)\n for index, bid in enumerate(new_ids):\n average = average_nums[index]\n bomber_app = 
classified_apps.get(bid)\n if not bomber_app:\n bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid\n ).first()\n bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':\n [], 'np_num': 0, 'need_num': average, 'partner_id': bomber.\n partner_id if bomber else ''}\n classified_apps[bid] = bomber_app\n else:\n p_num = len(bomber_app['p_list'])\n np_num = len(bomber_app['np_list'])\n if p_num > average:\n bomber_app['need_num'] = -np_num\n else:\n bomber_app['need_num'] = average - (p_num + np_num)\n bomber_app['p_num'] = p_num\n bomber_app['np_num'] = np_num\n if bomber_app['need_num'] < 0:\n random.shuffle(bomber_app['np_list'])\n res_over = bomber_app['np_list'][:-bomber_app['need_num']]\n bomber_app['to_list'] = res_over\n surplus_apps.extend(res_over)\n classified_apps_list = sorted(classified_apps.values(), key=lambda x: x\n ['need_num'], reverse=True)\n return surplus_apps, classified_apps_list\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)\ndef bomber_dispatch_applications(payload, msg_id):\n change_bombers = get_change_bomber()\n instalment_change_bombers = get_instalment_change_bomber()\n params = {ApplicationType.CASH_LOAN.value: change_bombers,\n ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}\n for type, bombers in params.items():\n change_bomber_dispatch_apps(change_bombers=bombers, type=type)\n\n\n@action(MessageAction.REPAIR_BOMBER)\ndef repair_bomber(payload, msg_id):\n app_mobile = payload['app_mobile']\n username = payload.get('user_name')\n logging.info('start repair bomber, number: %s' % app_mobile)\n if 'mobile_no' in payload and payload['mobile_no']:\n mobile = number_strip(str(payload['mobile_no']))[:64]\n name = payload.get('mobile_name')\n application = Application.filter(Application.user_mobile_no == mobile)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, mobile, username, name)\n if 'tel_no' in payload and payload['tel_no']:\n tel_no = number_strip(str(payload['tel_no']))[:64]\n name = payload.get('tel_name')\n application = Application.filter(Application.user_mobile_no == tel_no)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, tel_no, username, name)\n\n\n<mask token>\n\n\ndef get_summary_daily_time():\n mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')\n mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')\n now_date = datetime.now()\n now_date_time = now_date.time()\n today_str = str(now_date.date())\n if now_date_time < mid_time_t1.time():\n yes_date = now_date - timedelta(days=1)\n yes_date_str = str(yes_date.date())\n begin_str = yes_date_str + ' 17:20:00'\n end_str = today_str + ' 00:00:00'\n elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():\n begin_str = today_str + ' 00:00:00'\n end_str = today_str + ' 12:40:00'\n else:\n begin_str = today_str + ' 12:40:00'\n end_str = today_str + ' 17:20:00'\n begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')\n end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')\n summary_datetime = now_date - timedelta(minutes=30)\n summary_date = summary_datetime.date()\n return begin_time, end_time, summary_date\n\n\n@action(MessageAction.SUMMARY_DAILY)\ndef summary_daily_data(payload, msg_id):\n begin_time, end_time, summary_date = get_summary_daily_time()\n call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.\n bomber_id, CallActionsR.application_id, CallActionsR.promised_date,\n 
CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(\n CallActionsR.created_at >= begin_time, CallActionsR.created_at <\n end_time, CallActionsR.type << (0, 1))\n summary_dailys = {}\n for call in call_actions:\n if call.bomber_id not in summary_dailys:\n summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.\n bomber_id, 'summary_date': str(summary_date)}\n if call.name and call.number:\n summary_dailys[call.bomber_id]['call_cnt'] += 1\n if call.promised_date:\n summary_dailys[call.bomber_id]['ptp_cnt'] += 1\n C1_sql = (\n \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,a.cycle\n from \n (select a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,2,4,5)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\"\n % (begin_time, end_time))\n C1_repayment = run_all_sql(C1_sql)\n other_sql = (\n \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,cycle\n from (\n select application_id,current_bomber_id,pay_amount,repay_at,cycle\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at,\n br.cycle\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (3,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n group by 1,4) b\n group by 1\n \"\"\"\n % (begin_time, end_time))\n other_repayment = run_all_sql(other_sql)\n all_repayment = C1_repayment + other_repayment\n for res in all_repayment:\n bomber_id, pay_amount, cycle = res\n if bomber_id in summary_dailys:\n summary_dailys[bomber_id]['repayment'] += pay_amount\n else:\n summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': cycle, 'repayment': pay_amount, 'bomber_id':\n bomber_id, 'summary_date': str(summary_date)}\n insert_values = list(summary_dailys.values())\n if insert_values:\n SummaryDaily.insert_many(insert_values).execute()\n\n\n<mask token>\n\n\ndef get_app_logs(apps):\n app_logs = {}\n all_np_apps = []\n all_p_apps = []\n for a in apps:\n latest_bomber = a['latest_bomber']\n latest_bomber = a['cycle'] if not latest_bomber else latest_bomber\n if latest_bomber in app_logs:\n app_logs[latest_bomber]['to_ids'].append(a['id'])\n else:\n app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':\n [a['id']], 'np_ids': [], 'p_ids': []}\n if a['promised_date'] and a['promised_date'].date() >= datetime.now(\n ).date():\n app_logs[latest_bomber]['p_ids'].append(a['id'])\n all_p_apps.append(a)\n else:\n app_logs[latest_bomber]['np_ids'].append(a['id'])\n all_np_apps.append(a)\n return app_logs, all_np_apps, all_p_apps\n\n\ndef month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):\n apps = list(apps)\n np_apps = list(np_apps)\n random.shuffle(np_apps)\n apps_len = len(apps)\n np_apps_len = len(np_apps)\n end = 0\n all_app_precentage = 0\n partners = 
Partner.select().where(Partner.cycle == cycle, Partner.\n status == PartnerStatus.NORMAL.value)\n for p in partners:\n all_app_precentage += p.app_percentage\n for partner in partners:\n bombers = Bomber.select().where(Bomber.partner == partner.id, \n Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.\n value)\n bids = {b.id: b for b in bombers}\n if len(bids) == 0:\n logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))\n continue\n start = end\n if np_apps_len >= int(apps_len * all_app_precentage):\n end = start + int(apps_len * partner.app_percentage)\n else:\n end = start + int(np_apps_len * partner.app_percentage /\n all_app_precentage)\n partner_app = np_apps[start:end]\n dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)\n np_apps = np_apps[end:]\n return np_apps\n\n\ndef month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):\n sys_cycle = {(1): 'AB_TEST_C1A', (2): 'AB_TEST_C1B', (3): 'AB_TEST_C2',\n (4): 'AB_TEST_C3'}\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0\n )\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n bombers = bombers.where(Bomber.instalment == 0)\n bids = {b.id: b for b in bombers}\n if cycle == Cycle.C1A.value:\n np_ids = [a['id'] for a in np_apps]\n np = Application.update(status=ApplicationStatus.PROCESSING.value,\n ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids\n ).execute()\n bomber_app_logs = app_logs.get(cycle, {})\n out_param = {'application_ids': bomber_app_logs.get('to_ids', []),\n 'month_dispatch': 1, 'src_bomber_id': cycle}\n new_out_record(**out_param)\n in_param = {'cycle': cycle, 'application_ids': np_ids,\n 'dest_bomber_id': cycle}\n new_in_record(**in_param)\n bomber_app_logs['need_num'] = len(np_apps)\n bomber_app_logs['form_ids'] = np_ids\n bomber_app_logs['status'] = 1\n else:\n dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)\n dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)\n\n\ndef dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,\n type=ApplicationType.CASH_LOAN.value):\n apps = list(apps)\n random.shuffle(apps)\n bids_list = list(bids.keys())\n if len(bids_list) <= 0:\n logging.info('get_dispatch_app_to_bomber no bids')\n return\n average_num = get_average_number(len(apps), len(bids_list))\n bomber_end = 0\n with db.atomic():\n for index, bid in enumerate(bids_list):\n current_bomber = bids.get(bid)\n bomber_app_logs = app_logs.get(bid, {})\n bomber_start = bomber_end\n bomber_end = bomber_start + average_num[index]\n bomber_apps = apps[bomber_start:bomber_end]\n from_p, from_np, from_ids, status = [], [], [], 0\n for ba in bomber_apps:\n promised_date = ba.get('promised_date')\n from_ids.append(ba['id'])\n if promised_date and promised_date.date() >= date.today():\n from_p.append(ba['id'])\n else:\n from_np.append(ba['id'])\n app_status = ApplicationStatus.AB_TEST.value\n if (cycle == Cycle.C1A.value and not out_partner and type ==\n ApplicationType.CASH_LOAN.value):\n app_status = ApplicationStatus.PROCESSING.value\n if from_p:\n p = Application.update(ptp_bomber=bid, latest_bomber=bid,\n status=app_status).where(Application.id << from_p).execute(\n )\n p_ids = bomber_app_logs.get('p_ids', []) + from_p\n bomber_app_logs['p_ids'] = p_ids\n if from_np:\n np = Application.update(latest_bomber=bid, ptp_bomber=None,\n status=ApplicationStatus.AB_TEST.value).where(\n 
Application.id << from_np).execute()\n np_ids = bomber_app_logs.get('np_ids', []) + from_np\n bomber_app_logs['np_ids'] = np_ids\n in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.\n partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}\n if type == ApplicationType.CASH_LOAN.value:\n out_param = {'src_bomber_id': bid, 'application_ids':\n bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}\n new_out_record(**out_param)\n new_in_record(**in_param)\n else:\n out_and_in_record_instalment(**in_param)\n bomber_app_logs['status'] = 1\n need_num = bomber_app_logs.get('need_num', 0) + average_num[index]\n bomber_app_logs['need_num'] = need_num\n all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids\n bomber_app_logs['form_ids'] = all_form_ids\n if not out_partner:\n continue\n try:\n DispatchApp.delete().where(DispatchApp.application.in_(\n from_ids)).execute()\n dispatch_ins = [{'application': id, 'partner':\n current_bomber.partner_id, 'bomber': bid, 'status':\n DisAppStatus.NORMAL.value} for id in from_ids]\n DispatchApp.insert_many(dispatch_ins).execute()\n except Exception as e:\n logging.info(\n 'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str\n (e), bid, from_ids))\n\n\n<mask token>\n\n\ndef calc_entry_time(overdue_days):\n app_entry_time = {}\n overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],\n 'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}\n for key, value in overdue_entry.items():\n if value[0] <= overdue_days <= value[1]:\n app_entry_time[key] = datetime.now()\n else:\n app_entry_time[key] = None\n return app_entry_time\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_BOMBER_OVERDUE)\ndef summary_bomber_overdue_everyday(payload, msg_id):\n cycle_list = Cycle.values()\n which_day = date.today()\n for cycle in cycle_list:\n apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,\n ApplicationR.ptp_bomber, ApplicationR.overdue_days,\n ApplicationR.promised_date, ApplicationR.follow_up_date,\n ApplicationR.external_id, OverdueBillR.status, OverdueBillR.\n periods, OverdueBillR.sub_bill_id).join(OverdueBillR,\n JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id\n ).where(ApplicationR.status != ApplicationStatus.REPAID.value, \n ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()\n bomber_overdue_list = []\n for app in apps:\n status = app.get('status')\n if status == ApplicationStatus.REPAID.value:\n continue\n ptp_bomber = app.get('ptp_bomber')\n promised_date = app.get('promised_date')\n follow_up_date = app.get('follow_up_date')\n if not promised_date or promised_date.date() < date.today():\n ptp_bomber = promised_date = None\n if not follow_up_date or follow_up_date.date() < date.today():\n follow_up_date = None\n overdue_dict = {'collection_id': app.get('id'), 'external_id':\n app.get('external_id'), 'sub_bill_id': app.get(\n 'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app\n .get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':\n ptp_bomber, 'promised_date': promised_date,\n 'follow_up_date': follow_up_date, 'which_day': which_day,\n 'overdue_days': app.get('overdue_days')}\n bomber_overdue_list.append(overdue_dict)\n try:\n if bomber_overdue_list:\n with db.atomic():\n for index in range(0, len(bomber_overdue_list), 1000):\n insert_list = bomber_overdue_list[index:index + 1000]\n BomberOverdue.insert_many(insert_list).execute()\n except Exception as e:\n logging.info(\n 'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %\n (cycle, 
str(which_day), str(e)))\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)\ndef today_ptp_auto_call_switch(payload, msg_id):\n today = datetime.today().date()\n next_day = today + timedelta(days=1)\n apps = ApplicationR.select(ApplicationR.latest_bomber).where(\n ApplicationR.promised_date < next_day, ApplicationR.promised_date >=\n today, ApplicationR.promised_date.is_null(False), ApplicationR.\n status != ApplicationStatus.REPAID.value, ApplicationR.cycle <\n Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(\n ApplicationR.latest_bomber)\n bids = [a.latest_bomber_id for a in apps]\n if not bids:\n return\n q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(\n BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids\n ).execute()\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)\ndef update_today_switch_every_day(payload, msg_id):\n q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(\n BomberPtp.auto_ext.is_null(False)).execute()\n\n\n<mask token>\n",
"step-5": "import traceback\nfrom functools import partial\nimport json\nimport logging\nfrom collections import defaultdict\nfrom itertools import cycle as CycleIter\nfrom datetime import datetime, date, timedelta\nfrom decimal import Decimal\nimport random\nfrom copy import deepcopy\nfrom math import ceil\n\nimport boto3\nimport bottle\nfrom peewee import fn, SQL, JOIN_LEFT_OUTER, JOIN_INNER, R\nfrom mongoengine import Q\nfrom deprecated.sphinx import deprecated\n\nfrom bomber.api import (\n AccountService,\n MessageService,\n AuditService,\n BillService,\n Dashboard,\n GoldenEye,\n Hyperloop,\n Message,\n Scout)\nfrom bomber.constant_mapping import (\n AutoCallMessageCycle,\n ApplicationStatus,\n RealRelationship,\n BomberCallSwitch,\n CallActionCommit,\n ApplicantSource,\n ApplicationType,\n EscalationType,\n ApprovalStatus,\n AutoListStatus,\n AutoCallResult,\n BeforeInBomber,\n PriorityStatus,\n InboxCategory,\n OldLoanStatus,\n BombingResult,\n ContactStatus,\n SpecialBomber,\n PartnerStatus,\n Relationship,\n ConnectType,\n SubRelation,\n PhoneStatus,\n ContactType,\n SmsChannel,\n ContainOut,\n FIRSTLOAN,\n AppName,\n RipeInd,\n Cycle,\n ContactsUseful,\n DisAppStatus,\n BomberStatus,\n PartnerType)\nfrom bomber.controllers.templates import cs_number_conf\nfrom bomber.controllers.report_calculation.collection_tool import (\n average_call_duration_team\n)\nfrom bomber.controllers.report_calculation.collection_agent import get_agent\nfrom bomber.db import db, readonly_db\nfrom bomber.models_readonly import (\n DispatchAppHistoryR,\n AutoCallActionsR,\n ConnectHistoryR,\n ApplicationR,\n CallActionsR,\n OverdueBillR,\n BomberR)\nfrom bomber.models import (\n ManualCallListStatus,\n RepaymentReportInto,\n OldLoanApplication,\n DispatchAppHistory,\n CompanyContactType,\n FamilyContactType,\n ReportCollection,\n RepaymentReport,\n AutoCallActions,\n DispatchAppLogs,\n ConnectHistory,\n BombingHistory,\n ManualCallList,\n AutoIVRActions,\n SummaryBomber,\n SummaryDaily,\n IVRCallStatus,\n BomberOverdue,\n AutoCallList,\n AutoIVRStatus,\n SystemConfig,\n RepaymentLog,\n IVRActionLog,\n TotalContact,\n Application,\n CallActions,\n DispatchApp,\n OverdueBill,\n Escalation,\n BomberPtp,\n WorkerLog,\n BomberLog,\n CycleList,\n Template,\n Transfer,\n Summary2,\n AutoIVR,\n Partner,\n Contact,\n CallLog,\n Summary,\n Bomber,\n Inbox,\n Role,\n SCI,\n)\nfrom bomber.sns import MessageAction, send_to_default_q\nfrom bomber.utils import (\n get_cycle_by_overdue_days,\n str_no_utc_datetime,\n no_utc_datetime,\n gender_ktpnum,\n list_to_dict,\n birth_dt_ktp,\n number_strip,\n utc_datetime,\n OperatedDict,\n average_gen,\n time_logger,\n idg,\n)\nfrom bomber.report_work import get_every_cycle_report\n\napp = bottle.default_app()\nclient = boto3.client('sqs')\n#对外展示dict,key-函数名;v-函数数组\nactions = {}\n\n\ndef action(msg_action):\n action_name = msg_action.value.lower()\n if action_name not in actions:\n actions[action_name] = []\n\n def wrapper(func):\n actions[action_name].append(func)\n return func\n return wrapper\n\n\n@action(MessageAction.BOMBER_HEALTH_CHECK)\ndef health_check(payload, msg_id):\n pass\n\n\ndef dpd1_classify(item, lst):\n app_name = str(item['app_name']).upper()\n key = '{}_{}_DPD1'.format(app_name, str(item['su']))\n if key in BeforeInBomber.keys():\n lst[key].append(item['id'])\n return lst\n\n\ndef dpd1_process(lst):\n \"\"\"已废弃的方法\"\"\"\n if not lst:\n return\n\n for key, l in lst.items():\n rule = getattr(BeforeInBomber, key).value\n query = (AutoIVRActions\n 
.select(fn.DISTINCT(AutoIVRActions.loanid))\n .where(AutoIVRActions.loanid.in_(l),\n AutoIVRActions.group.in_(rule.get('group')),\n AutoIVRActions.callstate\n .in_(IVRCallStatus.call_success())))\n success_set = {i.loanid for i in query}\n failed_list = list(set(l) - success_set)\n post_params = {\n '$and': rule.get('$and'),\n 'app_list': failed_list\n }\n resp = Hyperloop().post(\"/bomber/score/verify\", json=post_params)\n if not resp.ok:\n logging.error(\n 'hyperloop score verification failed: %s, %s',\n str(resp.status_code),\n str(resp.text)\n )\n logging.error('hyperloop score verification failed: %s',\n str(post_params))\n continue\n\n logging.debug('hyperloop score verification success: %s', resp.content)\n resp_json = resp.json()\n # dpd1 提前进入bomber\n app_list = resp_json['data']\n if not app_list:\n continue\n for item in app_list:\n # 做ab_test,三分之一的人提前入催\n if random.randint(0, 5) == 1:\n send_to_default_q(\n MessageAction.APPLICATION_BOMBER,\n {'id': int(item)}\n )\n\n\n# auto_ivr,自动外呼系统\n@action(MessageAction.GET_IVR)\ndef get_ivr(payload, msg_id):\n logging.warning('start get_ivr')\n sys_config = (SystemConfig.select()\n .where(SystemConfig.key == 'DPD1-3_INTO_IVR')\n .first())\n # 得到所有的lid\n now = date.today()\n # 预期用户不再使用ivr,而是直接进入催收,故修改时间窗口不再获取预期数据\n if sys_config and sys_config.value:\n start = now - timedelta(days=3)\n else:\n start = now\n end = now + timedelta(days=4)\n # TODO: 使用redis\n item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()\n if not item:\n # 开始时清空ivr数据\n AutoIVR.delete().execute()\n current_page = 0\n elif item.current_page >= item.total_page:\n return\n else:\n current_page = item.current_page\n\n #逾期分组 appname + 逾期次数 + 逾期天数\n auto_ivr = {\n 'DanaCepat01': 1,\n 'DanaCepat00': 2,\n 'DanaCepat0PDP1': 3,\n 'PinjamUang01': 4,\n 'PinjamUang00': 5,\n 'PinjamUang0PDP1': 6,\n 'KtaKilat01': 7,\n 'KtaKilat00': 8,\n 'KtaKilat0PDP1': 9,\n 'DanaCepat11': 10,\n 'DanaCepat10': 11,\n 'DanaCepat1PDP1': 12,\n 'PinjamUang11': 13,\n 'PinjamUang10': 14,\n 'PinjamUang1PDP1': 15,\n 'KtaKilat11': 16,\n 'KtaKilat10': 17,\n 'KtaKilat1PDP1': 18,\n 'DanaCepat0PDP2': 19,\n 'DanaCepat0PDP3': 20,\n 'DanaCepat03': 21,\n 'PinjamUang0PDP2': 22,\n 'PinjamUang0PDP3': 23,\n 'PinjamUang03': 24,\n 'KtaKilat0PDP2': 25,\n 'KtaKilat0PDP3': 26,\n 'KtaKilat03': 27,\n 'DanaCepat1PDP2': 28,\n 'DanaCepat1PDP3': 29,\n 'PinjamUang1PDP2': 30,\n 'PinjamUang1PDP3': 31,\n 'KtaKilat1PDP2': 32,\n 'KtaKilat1PDP3': 33,\n 'DanaCepat13': 36,\n 'PinjamUang13': 37,\n 'KtaKilat13': 38,\n 'DanaCepat12': 39,\n 'PinjamUang12': 40,\n 'KtaKilat12': 41,\n 'DanaCepat02': 42,\n 'PinjamUang02': 43,\n 'KtaKilat02': 44,\n 'IKIDana01': 100,\n 'IKIDana00': 101,\n 'IKIDana0PDP1': 102,\n 'IKIDana11': 103,\n 'IKIDana10': 104,\n 'IKIDana1PDP1': 105,\n 'IKIDana0PDP2': 106,\n 'IKIDana0PDP3': 107,\n 'IKIDana03': 108,\n 'IKIDana1PDP2': 109,\n 'IKIDana1PDP3': 110,\n 'IKIDana13': 111,\n 'IKIDana12': 112,\n 'IKIDana02': 113,\n }\n current_page += 1\n with db.atomic() as transaction:\n while True:\n bill_service = BillService()\n #获取当天到未来4天的到期bill_sub.origin_due_at\n ivr_action = bill_service.ivr_pages(\n page=current_page,\n page_size=500,\n start_time=utc_datetime(str(start)),\n end_time=utc_datetime(str(end)))\n result = ivr_action['result']\n page_size = int(ivr_action.get('page_size', 0))\n total_page = int(ivr_action.get('total_page', 0))\n\n insert_args = []\n for a in result:\n due_at = no_utc_datetime(a['due_at'])\n days = (due_at.date() - now).days\n if days == 2:\n continue\n if days > 0:\n time = str(days)\n 
else:\n # 上面通过时间控制了请求的数据,不会获取到逾期为两天的件\n time = str(days).replace('-', 'PDP')\n\n #su 该用户逾期多少次\n key = a['app_name'] + str(a['su']) + time\n group = auto_ivr.get(key)\n\n user_id = a['user_id']\n try:\n user_resp = (AccountService()\n .get_user(path_params={'user_id': user_id}))\n if str(user_resp['mobile_no']) == str(a['user_mobile_no']):\n numbers = a['user_mobile_no']\n else:\n numbers = (a['user_mobile_no'] +\n ',' + user_resp.get('mobile_no'))\n except:\n logging.error('Request Account Service Error.')\n numbers = a['user_mobile_no']\n\n insert_args.append({\n 'application_id': a['id'],\n 'numbers': numbers,\n 'group': group,\n 'user_id': user_id})\n\n AutoIVR.insert_many(insert_args).execute()\n if current_page == 1:\n IVRActionLog.create(total_page=total_page,\n proc_date=now,\n page_size=page_size,\n current_page=current_page)\n # 不知道什么原因,此处create不返回刚创建的对象\n item = IVRActionLog.get(IVRActionLog.proc_date == now)\n else:\n item.current_page = current_page\n item.page_size = page_size\n item.total_page = total_page\n item.save()\n transaction.commit()\n current_page += 1\n if current_page > int(total_page):\n break\n # try:\n # ivr_t2_test()\n # except Exception as e:\n # logging.error(\"ivr_test_error:%s\"%str(e))\n\n if sys_config and sys_config.value:\n try:\n classfiy_dpd_ptp_apps()\n except Exception as e:\n logging.error(\"dpd1-3_test_error:%s\"%str(e))\n\n\n\n# t-2进ivr测试代码\ndef ivr_t2_test():\n t2_groups = [39, 40, 41, 42, 43, 44]\n ivr_test_proportion = 0.2\n sys_config = (SystemConfig.select()\n .where(SystemConfig.key == 'IVR_TEST_PROPORTION')\n .first())\n if sys_config and sys_config.value:\n ivr_test_proportion = float(sys_config.value)\n # 获取所有t-2的件\n t2_ivrs = (AutoIVR.select()\n .where(AutoIVR.group << t2_groups,\n AutoIVR.status == AutoIVRStatus.AVAILABLE.value))\n t2_dict = defaultdict(list)\n # 每个group获取一定比例的件\n for ivr in t2_ivrs:\n t2_dict[ivr.group].append(ivr.id)\n test_ivr_ids = []\n for group, ivr_ids in t2_dict.items():\n number = ceil(len(ivr_ids) * ivr_test_proportion)\n test_ivr_ids += ivr_ids[:number]\n if not test_ivr_ids:\n return\n # 更新ivr状态\n q = (AutoIVR.update(status=AutoIVRStatus.SUCCESS.value)\n .where(AutoIVR.group << t2_groups,\n AutoIVR.id.not_in(test_ivr_ids))\n .execute())\n\n# 过滤到bomber中下p的件\ndef classfiy_dpd_ptp_apps():\n dpd_group = AutoIVR.dpd_groups()\n dpd1_3_ivr_pro = 0.2\n sys_config = (SystemConfig.select()\n .where(SystemConfig.key == 'DPD1-3_IVR_TEST')\n .first())\n if sys_config and sys_config.value:\n dpd1_3_ivr_pro = float(sys_config.value)\n # 获取有是有已经下p的件\n apps = (ApplicationR.select(ApplicationR.external_id)\n .where(ApplicationR.overdue_days < 4,\n ApplicationR.status != ApplicationStatus.REPAID.value,\n ApplicationR.promised_date >= date.today(),\n ApplicationR.promised_date.is_null(False)))\n apps_ids = [a.external_id for a in apps]\n # 删除ivr中下p的件\n if apps_ids:\n d = (AutoIVR.delete()\n .where(AutoIVR.application_id.in_(apps_ids),\n AutoIVR.group.in_(dpd_group))\n .execute())\n # 所有dpd1-3的件\n ivrs = (AutoIVR.select().where(AutoIVR.group.in_(dpd_group)))\n ivrs_dict = defaultdict(list)\n for ivr in ivrs:\n ivrs_dict[ivr.group].append(ivr.id)\n test_ivrs = []\n for group, ivr_ids in ivrs_dict.items():\n number = ceil(len(ivr_ids) * dpd1_3_ivr_pro)\n test_ivrs += ivr_ids[:number]\n if not test_ivrs:\n return\n # 更新不测试的数据的状态\n q = (AutoIVR.update(status=AutoIVRStatus.SUCCESS.value)\n .where(AutoIVR.group.in_(dpd_group),\n AutoIVR.id.not_in(test_ivrs))\n .execute())\n\n\n\n# APP 
合并特殊处理\n@action(MessageAction.APP_MERGE)\n@deprecated(version='1.0', reason='This function will be removed soon')\ndef app_merge(payload, msg_id):\n\n # 将DPD未到4的提前拉近bomber\n sql = \"\"\"\n select *\n from (\n select a.id as id\n from dashboard.application as a\n inner join repayment.bill2 as b on b.external_id = a.id\n where not exists (\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.app = 'DanaCepat'\n and a.is_first_loan = 1\n and a.apply_at < '2018-08-23 20:50:00'\n and b.overdue_days between 1 and 3\n and b.status != 2) result\n where not exists (\n select 1\n from bomber.application as a\n where a.cycle = 1\n and a.status = 4\n and a.id = result.id\n )\n \"\"\"\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n new_data = cursor.fetchall()\n cursor.close()\n if new_data:\n bomber = [103, 104]\n for d in new_data:\n app_id = {'id': d[0]}\n application_overdue(app_id, None)\n\n # 将新进的件随机分给对应催收员\n (Application\n .update(status=ApplicationStatus.AB_TEST.value,\n latest_bomber=random.choice(bomber),\n ptp_bomber=None\n )\n .where(Application.id == d[0])\n ).execute()\n logging.warning('add new app success')\n\n # 重新登陆后,且没有ptp,将其从人工催收中删除\n ptp = date.today() - timedelta(days=1)\n del_sql = \"\"\"\n select a.id\n from bomber.application as a\n where exists(\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.cycle = 1\n and a.status = 4\n and (a.promised_date is null or a.promised_date < \"%s\")\n \"\"\" % ptp\n cursor = readonly_db.get_cursor()\n cursor.execute(del_sql)\n del_date = cursor.fetchall()\n cursor.close()\n if del_date:\n return\n ids = list()\n for d in del_date:\n ids.append(d[0])\n (Application\n .update(status=ApplicationStatus.UNCLAIMED.value,\n latest_bomber=None)\n .where(Application.id << ids)).execute()\n\n\n@action(MessageAction.APPLICATION_BOMBER)\ndef application_overdue(payload, msg_id):\n\n application_id = payload['id']\n sub_bill_id = payload['bill_sub_id']\n local_app = (Application.select()\n .where(Application.external_id == application_id)\n .order_by(Application.finished_at)\n .first())\n # 如果是单期且催收单存在\n if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:\n logging.info('application %s overdue, already exists', application_id)\n add_contact(local_app)\n return\n # 如果是分期,查看子账单是否存在\n if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:\n overdue_bill = (OverdueBillR.select()\n .where(OverdueBillR.sub_bill_id == sub_bill_id,\n OverdueBillR.external_id == application_id))\n if overdue_bill.exists():\n logging.info(\n 'application %s,sub_bill_id %s overdue, already exists' %\n (application_id, sub_bill_id))\n return\n\n try:\n sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])\n sub_bill = sub_bill[0]\n except Exception:\n logging.error('application %s overdue, get sub_bill info failed:'\n 'Request To repayment Error', application_id)\n return\n\n if sub_bill['status'] == 2:\n logging.error('application %s overdue, but bills already cleared',\n application_id)\n return\n\n overdue_days = sub_bill.get('overdue_days', 0)\n if overdue_days == 0:\n logging.info('application {} no overdue'\n .format(str(application_id)))\n return\n\n gold_eye = GoldenEye().get('/applications/%s' % application_id)\n if not gold_eye.ok:\n logging.error('get application %s failed: Request to GoldenEye.',\n application_id)\n return\n gold_app = 
gold_eye.json().get('data')\n user_id = gold_app['user_id']\n\n apply_history = Dashboard().get('/users/%s/apply-history' % user_id)\n if not apply_history.ok:\n logging.error('get user %s apply history failed: Request '\n 'to Dashboard Failed.', user_id)\n return\n history = apply_history.json().get('data')\n loan_success_times = len([1 for i in history\n if i['status'] in [80, 90, 100, 70] and\n i['id'] != gold_app['id']])\n\n id = application_id\n type = ApplicationType.CASH_LOAN.value\n bill_id = sub_bill.get(\"bill_id\")\n amount = sub_bill.get(\"amount\")\n amount_net = sub_bill.get('amount_net')\n interest_rate = sub_bill.get('interest_rate')\n overdue_days = sub_bill.get('overdue_days')\n origin_due_at = sub_bill.get('origin_due_at')\n sub_overdue_bill = {\n \"collection_id\": id,\n \"bill_id\": bill_id,\n \"sub_bill_id\": sub_bill_id,\n \"periods\": sub_bill.get(\"periods\"),\n \"overdue_days\": overdue_days,\n \"origin_due_at\": origin_due_at,\n \"amount\": amount,\n \"amount_net\": amount_net,\n \"interest_rate\": interest_rate,\n \"external_id\": application_id\n }\n # 根据催收单类型来生成id\n if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:\n if local_app and local_app.status != ApplicationStatus.REPAID.value:\n sub_overdue_bill[\"collection_id\"] = local_app.id\n local_app.amount += amount\n local_app.amount_net += amount_net\n local_app.save()\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info(\n \"application %s,sub_bill_id:%s overdue created\" %\n (application_id, sub_bill_id))\n return\n else:\n id = idg()\n type = ApplicationType.CASH_LOAN_STAGING.value\n sub_overdue_bill[\"collection_id\"] = id\n\n ptp_info = BombingHistory.filter(BombingHistory.application == id).first()\n\n promised_amount = ptp_info and ptp_info.promised_amount\n promised_date = ptp_info and ptp_info.promised_date\n\n application = Application.create(\n id=id,\n user_id=gold_app['user_id'],\n user_mobile_no=gold_app['user_mobile_no'],\n user_name=gold_app['id_name'],\n app=gold_app['app'],\n device_no=gold_app['device_no'],\n contact=json.dumps(gold_app.get('contact')),\n apply_at=gold_app.get('apply_date'),\n\n id_ektp=gold_app.get('id_ektp'),\n birth_date=birth_dt_ktp(gold_app.get('id_ektp')),\n gender=gender_ktpnum(gold_app.get('id_ektp')),\n\n profile_province=(gold_app.get('profile_province') or {}).get('name'),\n profile_city=(gold_app.get('profile_city') or {}).get('name'),\n profile_district=(gold_app.get('profile_district') or {}).get('name'),\n profile_residence_time=gold_app.get('profile_residence_time'),\n profile_residence_type=gold_app.get('profile_residence_type'),\n profile_address=gold_app.get('profile_address'),\n profile_education=gold_app.get('profile_education'),\n profile_college=(gold_app.get('profile_college') or {}).get('name'),\n\n job_name=gold_app.get('job_name'),\n job_tel=gold_app.get('job_tel'),\n job_bpjs=gold_app.get('job_bpjs'),\n job_user_email=gold_app.get('job_user_email'),\n job_type=gold_app.get('job_type'),\n job_industry=gold_app.get('job_industry'),\n job_department=gold_app.get('job_department'),\n job_province=(gold_app.get('job_province') or {}).get('name'),\n job_city=(gold_app.get('job_city') or {}).get('name'),\n job_district=(gold_app.get('job_district') or {}).get('name'),\n job_address=gold_app.get('job_address'),\n\n amount=amount,\n amount_net=amount_net,\n interest_rate=interest_rate,\n # late_fee_rate=bill.get('late_fee_rate'),\n # late_fee_initial=late_fee_initial,\n # late_fee=late_fee,\n # interest=interest,\n 
term=gold_app.get('term'),\n origin_due_at=origin_due_at,\n # due_at=bill.get('due_at'),\n overdue_days=overdue_days,\n\n repay_at=sub_bill.get('repay_at'),\n # principal_paid=principal_paid,\n # late_fee_paid=late_fee_paid,\n # repaid=repaid,\n # unpaid=unpaid,\n\n loan_success_times=loan_success_times,\n arrived_at=datetime.now(),\n follow_up_date=datetime.now(),\n\n promised_amount=promised_amount,\n promised_date=promised_date,\n external_id=application_id,\n type=type,\n bill_id=bill_id,\n dpd1_entry=datetime.now()\n )\n\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n\n logging.info('overdue application %s created', application_id)\n\n # new overdue application equals to 'escalate from 0 to 1'\n Escalation.create(\n application=id,\n type=EscalationType.AUTOMATIC.value,\n status=ApprovalStatus.APPROVED.value,\n current_cycle=0,\n escalate_to=1,\n )\n add_contact(application)\n\n\ndef add_contact(application):\n\n logging.info('start add contact for application: %s', application.id)\n\n # 添加联系人信息\n contacts = Contact.filter(\n Contact.user_id == application.user_id,\n )\n existing_numbers = {contact.number for contact in contacts}\n\n insert_contacts = list()\n\n mon_insert_contact = {}\n # applicant\n user_mobile_no = number_strip(application.user_mobile_no)\n if user_mobile_no and user_mobile_no not in existing_numbers:\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': application.user_name,\n 'number': user_mobile_no,\n 'relationship': Relationship.APPLICANT.value,\n 'source': 'apply info',\n 'real_relationship': Relationship.APPLICANT.value\n })\n existing_numbers.add(number_strip(application.user_mobile_no))\n\n extra_phone = GoldenEye().get(\n '/users/%s/extra-phone' % application.user_id\n )\n if not extra_phone.ok:\n extra_phone = []\n logging.error('get user %s extra contacts failed',\n application.user_id)\n else:\n extra_phone = extra_phone.json()['data']\n\n if extra_phone:\n for i in extra_phone:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': application.user_name,\n 'number': number,\n 'relationship': Relationship.APPLICANT.value,\n 'source': 'extra phone',\n 'real_relationship': Relationship.APPLICANT.value\n })\n key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n\n # family\n # ec contact\n ec_contact = []\n contact = json.loads(application.contact or '[]')\n for i in contact:\n if (number_strip(i['mobile_no']) not in existing_numbers and\n number_strip(i['mobile_no'])):\n ec_contact.append({\n 'user_id': application.user_id,\n 'name': i['name'],\n 'number': number_strip(i['mobile_no']),\n 'relationship': Relationship.FAMILY.value,\n 'sub_relation': SubRelation.EC.value,\n 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value\n })\n key = (user_mobile_no,\n number_strip(i['mobile_no']),\n ContactType.F_EC.value)\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['mobile_no']))\n if i['type'] != 1:\n continue\n if (number_strip(i['tel_no']) not in existing_numbers and\n number_strip(i['tel_no'])):\n ec_contact.append({\n 'user_id': application.user_id,\n 'name': i['name'],\n 'number': number_strip(i['tel_no']),\n 'relationship': Relationship.FAMILY.value,\n 'sub_relation': SubRelation.EC.value,\n 'source': FamilyContactType.EC.value,\n 
'real_relationship': Relationship.FAMILY.value\n })\n key = (user_mobile_no,\n number_strip(i['tel_no']),\n ContactType.F_EC.value)\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['tel_no']))\n\n if ec_contact:\n Contact.insert_many(ec_contact).execute()\n\n # company\n if all((application.job_tel,\n number_strip(application.job_tel),\n number_strip(application.job_tel) not in existing_numbers)):\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': None,\n 'number': number_strip(application.job_tel),\n 'relationship': Relationship.COMPANY.value,\n 'source': 'basic info job_tel',\n 'real_relationship': Relationship.COMPANY.value\n })\n key = (user_mobile_no,\n number_strip(application.job_tel),\n ContactType.C_BASIC_INFO_JOB_TEL.value)\n mon_insert_contact[key] = 1, 0, None\n existing_numbers.add(number_strip(application.job_tel))\n\n # suggested\n\n sms_contacts = GoldenEye().get(\n '/applications/%s/sms-contacts' % application.external_id\n )\n if not sms_contacts.ok:\n sms_contacts = []\n logging.info('get user %s sms contacts failed', application.external_id)\n else:\n sms_contacts = sms_contacts.json()['data']\n\n if sms_contacts:\n for i in sms_contacts:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': i['name'][:128],\n 'number': number,\n 'relationship': Relationship.SUGGESTED.value,\n 'source': 'sms contacts',\n 'real_relationship': Relationship.SUGGESTED.value\n })\n key = (user_mobile_no,\n number,\n ContactType.S_SMS_CONTACTS.value)\n mon_insert_contact[key] = 1, 0, i['name'][:128]\n existing_numbers.add(number)\n\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n\n cf = GoldenEye().get(\n '/applications/%s/call/frequency' % application.external_id\n )\n if not cf.ok:\n call_frequency = []\n logging.error('get application %s call frequency error',\n application.external_id)\n else:\n call_frequency = cf.json()['data']\n\n # 结构不一样,重新生成\n insert_contacts = []\n fm = GoldenEye().get(\n '/applications/%s/contact/family-member' % application.external_id\n )\n if not fm.ok:\n family = []\n logging.error('get application %s family-member info error',\n application.external_id)\n else:\n family = fm.json()['data']\n if family:\n for i in family:\n if not (i.get('number')):\n logging.info('family member %s' % str(i))\n continue\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n logging.info('family members: %s' % str(i))\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': i['name'][:128],\n 'number': number,\n 'relationship': Relationship.FAMILY.value,\n 'source': FamilyContactType.CALLEC.value,\n 'total_count': i.get('total_count', 1),\n 'total_duration': i.get('total_duration', 0),\n 'real_relationship': Relationship.FAMILY.value\n })\n key = user_mobile_no, number, ContactType.F_CALL_EC.value\n mon_insert_contact[key] = (i.get('total_count', 1),\n i.get('total_duration', 0),\n i['name'][:128])\n existing_numbers.add(number)\n\n mon_update_contact = {}\n if call_frequency:\n with db.atomic():\n count = 1\n for i in call_frequency:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n (Contact\n .update(total_count=i['total_count'],\n total_duration=i['total_duration'])\n .where(Contact.number == number,\n Contact.user_id == application.user_id))\n 
key = user_mobile_no, number\n mon_update_contact[key] = (i['total_count'],\n i['total_duration'])\n continue\n\n # 设置通话频率最多的五个为family member\n if count < 6:\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': i['name'][:128],\n 'number': number,\n 'relationship': Relationship.FAMILY.value,\n 'total_count': i['total_count'],\n 'total_duration': i['total_duration'],\n 'source': FamilyContactType.CALLTOP5.value,\n 'real_relationship': Relationship.FAMILY.value\n })\n count += 1\n key = user_mobile_no, number, ContactType.F_CALL_TOP5.value\n mon_insert_contact[key] = (i['total_count'],\n i['total_duration'],\n i['name'][:128])\n else:\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': i['name'][:128],\n 'number': number,\n 'relationship': Relationship.SUGGESTED.value,\n 'total_count': i['total_count'],\n 'total_duration': i['total_duration'],\n 'source': 'call frequency',\n 'real_relationship': Relationship.SUGGESTED.value\n })\n key = (user_mobile_no,\n number,\n ContactType.S_CALL_FREQUENCY.value)\n mon_insert_contact[key] = (i['total_count'],\n i['total_duration'],\n i['name'][:128])\n\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n\n # 信用认证号码加入到本人\n next_apply_list = (AccountService().add_contact(application.user_id))\n\n for next_apply in next_apply_list:\n number = number_strip(str(next_apply))[:64]\n if number and number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=application.user_name,\n number=number,\n relationship=Relationship.SUGGESTED.value,\n source='online profile phone',\n real_relationship=Relationship.SUGGESTED.value\n )\n key = (user_mobile_no,\n number,\n ContactType.S_ONLINE_PROFILE_PHONE.value)\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n\n # 双卡手机另一个号码加入到本人队列\n next_applicant = GoldenEye().get(\n '/bomber/%s/dual_contact' % application.user_id\n )\n if not next_applicant.ok:\n next_applicant = []\n logging.error('get user %s dual_contact contacts failed'\n % application.user_id)\n else:\n next_applicant = next_applicant.json()['data']\n\n if next_applicant:\n for i in next_applicant:\n number = number_strip(str(i))[:64]\n if number and number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=application.user_name,\n number=number,\n relationship=Relationship.APPLICANT.value,\n source='apply info',\n real_relationship=Relationship.APPLICANT.value\n )\n key = user_mobile_no, number, ContactType.A_APPLY_INFO.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n\n # add new contact\n # 将同个ktp注册的多个号码添加到本人\n numbers = []\n try:\n numbers = (AccountService()\n .ktp_number(path_params={'user_id': application.user_id}))\n except Exception as e:\n logging.info('request ktp numbers failed %s' % str(e))\n\n for n in numbers:\n number = number_strip(str(n))[:64]\n if number and number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=application.user_name,\n number=number,\n relationship=Relationship.APPLICANT.value,\n source='ktp number',\n real_relationship=Relationship.APPLICANT.value\n )\n key = (user_mobile_no,\n number,\n ContactType.A_KTP_NUMBER.value)\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success'\n % 
application.user_id)\n\n # 将contact表中is_family为true的标记为ec\n try:\n ecs = GoldenEye().get(\n '/applications/%s/contact/ec' % application.external_id\n )\n except Exception as e:\n logging.info('request ec-member error: %s' % str(e))\n try:\n if not ecs.ok:\n ec = []\n logging.info('get application %s ec-member info error',\n application.external_id)\n else:\n ec = ecs.json()['data']\n\n if ec:\n for e in ec:\n number = number_strip(e['numbers'])[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=e['name'][:128],\n number=number,\n relationship=Relationship.FAMILY.value,\n source=FamilyContactType.CONTACTEC.value,\n real_relationship=Relationship.FAMILY.value\n )\n key = (user_mobile_no,\n number,\n ContactType.F_CONTACT_EC.value)\n mon_insert_contact[key] = 1, 0, e['name'][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add ec_member error:%s' % str(e))\n\n # 将contact中is_me标记为true的标记为本人\n try:\n mn = GoldenEye().get(\n '/applications/%s/contact/my_number' % application.external_id\n )\n except Exception as e:\n logging.info('request my_number error: %s' % str(e))\n try:\n if not mn.ok:\n my = []\n logging.info('get application %s my_number info error',\n application.external_id)\n else:\n my = mn.json()['data']\n\n if my:\n for m in my:\n number = number_strip(m)[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=my[m][:128],\n number=number,\n relationship=Relationship.SUGGESTED.value,\n source='my number',\n real_relationship=Relationship.SUGGESTED.value\n )\n key = user_mobile_no, number, ContactType.S_MY_NUMBER.value\n mon_insert_contact[key] = 1, 0, my[m][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add my_member error:%s' % str(e))\n\n # 得到company的号码\n try:\n cn = GoldenEye().get(\n '/applications/%s/contact/company-number' % application.external_id\n )\n except Exception as e:\n logging.info('request company-number error: %s' % str(e))\n try:\n if not cn.ok:\n cn = []\n logging.info('get application %s company_number info error',\n application.external_id)\n else:\n cn = cn.json()['data']\n\n if cn:\n for c in cn:\n number = c\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=cn[c][:128],\n number=number,\n relationship=Relationship.COMPANY.value,\n source='company',\n real_relationship=Relationship.COMPANY.value\n )\n key = user_mobile_no, number, ContactType.C_COMPANY.value\n mon_insert_contact[key] = 1, 0, cn[c][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add company_member error:%s' % str(e))\n\n # 得到本人在其他设备上登陆的sim联系方式,加入applicant中\n try:\n ol = (AccountService()\n .other_login_contact(userId=application.user_id))\n except Exception as e:\n logging.error('request other_login error: %s' % e)\n ol = {}\n\n try:\n for o in ol:\n number = number_strip(o)\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=ol[o][:128],\n number=number,\n relationship=Relationship.SUGGESTED.value,\n source='other_login',\n real_relationship=Relationship.SUGGESTED.value\n )\n key = (user_mobile_no,\n number,\n ContactType.S_OTHER_LOGIN.value)\n mon_insert_contact[key] = 1, 0, ol[o][:128]\n except Exception as e:\n logging.error('add other_login number error:%s' % e)\n\n logging.info('add contact for application %s 
finished', application.id)\n if mon_insert_contact or mon_update_contact:\n send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON,\n {\n 'user_mobile_no': user_mobile_no,\n 'insert_contact': str(mon_insert_contact),\n 'update_contact': str(mon_update_contact),\n 'user_id': application.user_id,\n 'name': application.user_name\n })\n\n\n@action(MessageAction.IMPORT_CONTACT_TO_MON)\ndef import_contact_to_mon(payload, msg_id):\n user_mobile_no = payload['user_mobile_no']\n insert_contact = eval(payload['insert_contact'])\n update_contact = eval(payload['update_contact'])\n user_id = payload['user_id']\n name = payload['name']\n\n if not (insert_contact or update_contact or user_mobile_no):\n logging.error(\"Invalid params\")\n drop_duplicated_contact({'numbers': [user_mobile_no]}, None)\n send_to_default_q(MessageAction.CONTACT_FROM_TOTAL, {\n 'number': user_mobile_no,\n 'user_id': user_id\n })\n return\n\n contacts = TotalContact.objects(src_number=user_mobile_no, is_calc=False)\n insert_list = []\n for c in contacts:\n key = (user_mobile_no, c.dest_number, c.source)\n if key in insert_contact:\n insert_contact.pop(key)\n\n for (sn, dn, s), (tc, td, na) in insert_contact.items():\n insert_list.append({\n 'src_number': sn,\n 'src_name': name,\n 'dest_number': dn,\n 'dest_name': na,\n 'source': s,\n 'total_count': tc,\n 'total_duration': td\n })\n\n if insert_list:\n insert_count = len((TotalContact\n .objects\n .insert([TotalContact(**dct)\n for dct in insert_list])))\n logging.info(\"insert success %s\", insert_count)\n\n update_count = 0\n for (sn, dn), (tc, td) in update_contact.items():\n result = (TotalContact\n .objects(src_number=sn, dest_number=dn, is_calc=False)\n .update(total_count=tc, total_duration=td))\n if result:\n update_count += 1\n logging.info(\"update success %s\", update_count)\n\n drop_duplicated_contact({'numbers': [user_mobile_no]}, None)\n send_to_default_q(MessageAction.CONTACT_FROM_TOTAL, {\n 'number': user_mobile_no,\n 'user_id': user_id\n })\n\n\n@action(MessageAction.DROP_DUPLICATED_CONTACT)\ndef drop_duplicated_contact(payload, msg_id):\n \"\"\"\n total_count,total_duration去重时,先total_count, 后total_duration\n\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n numbers = payload.get('numbers', [])\n if not numbers:\n logging.error(\"no numbers should drop\")\n\n query = (TotalContact\n .objects(Q(src_number__in=numbers) | Q(dest_number__in=numbers)))\n\n contact_list = defaultdict(list)\n delete_list = []\n insert_list = []\n for c in query:\n if c.src_number == c.dest_number:\n delete_list.append(c.id)\n\n key = c.src_number, c.dest_number, c.source\n contact_list[key].append({\n 'id': c.id,\n 'src_number': c.src_number,\n 'dest_number': c.dest_number,\n 'total_count': c.total_count,\n 'total_duration': c.total_duration,\n 'is_calc': c.is_calc,\n 'source': c.source,\n 'src_name': c.src_name,\n 'dest_name': c.dest_name\n })\n\n contact_list2 = deepcopy(contact_list)\n for key, info in contact_list.items():\n _info = sorted(info,\n key=lambda x: (not x['is_calc'],\n x['total_count'],\n x['total_duration']),\n reverse=True)\n rs = _info[0]\n if not rs['is_calc']:\n contact_list2[(key[1], key[0], key[2])].append({\n 'src_number': rs['dest_number'],\n 'dest_number': rs['src_number'],\n 'total_count': rs['total_count'],\n 'total_duration': rs['total_duration'],\n 'is_calc': True,\n 'source': rs['source'],\n 'id': '',\n 'src_name': rs['dest_name'],\n 'dest_name': rs['src_name']\n })\n delete_ids = [i['id'] for i in _info[1:] if i['id']]\n 
delete_list.extend(delete_ids)\n\n for key, info in contact_list2.items():\n _info = sorted(info,\n key=lambda x: (not x['is_calc'],\n x['total_count'],\n x['total_duration']),\n reverse=True)\n rs = _info[0]\n # 第一轮已经把不是反转的号码全部刷过\n if not rs['is_calc']:\n continue\n if not rs['id']:\n rs.pop('id')\n insert_list.append(rs)\n\n delete_ids = [i['id'] for i in _info[1:] if i['id']]\n delete_list.extend(delete_ids)\n\n if delete_list:\n delete_count = TotalContact.objects(id__in=delete_list).delete()\n logging.info(\"numbers %s: delete success %s\", numbers, delete_count)\n\n if insert_list:\n insert_count = len((TotalContact\n .objects\n .insert([TotalContact(**dct)\n for dct in insert_list])))\n logging.info(\"numbers %s: insert success %s\", numbers, insert_count)\n\n\ndef get_contact_from_mongo(number):\n if not number:\n return []\n\n query = (TotalContact\n .objects(src_number=number,\n source__in=TotalContact.available())\n .order_by('source'))\n lst = []\n for c in query:\n relation = TotalContact.relationship(c.source)\n if relation == -1:\n continue\n source = TotalContact.str_source(c.source)\n if not source:\n continue\n lst.append({\n 'related_number': c.dest_number,\n 'source': source,\n 'is_calc': c.is_calc,\n 'total_count': c.total_count,\n 'total_duration': c.total_duration,\n 'relation': relation,\n 'name': c.dest_name\n })\n return lst\n\n\n@action(MessageAction.CONTACT_FROM_TOTAL)\ndef contact_from_total(payload, msg_id):\n number = payload.get('number')\n user_id = payload.get('user_id')\n if not (number and user_id):\n logging.error(\"Invalid params\")\n return\n result = get_contact_from_mongo(number)\n if not result:\n logging.error(\"contact from mongo is none\")\n return\n\n contacts = Contact.filter(Contact.user_id == user_id)\n existing_numbers = {contact.number for contact in contacts}\n contact_list = []\n\n for c in result:\n number = number_strip(c['related_number'])\n if number in existing_numbers:\n continue\n\n contact_list.append({\n 'user_id': user_id,\n 'name': c['name'],\n 'number': number,\n 'relationship': c['relation'],\n 'source': c['source'],\n 'total_duration': c['total_duration'],\n 'total_count': c['total_count'],\n 'real_relationship': c['relation']\n })\n existing_numbers.add(number)\n\n if contact_list:\n Contact.insert_many(contact_list).execute()\n\n\n@action(MessageAction.BILL_REVOKE)\ndef bill_revoke(payload, msg_id):\n application_id = payload['external_id']\n if 'bill_sub_id' not in payload:\n bill_revoke_old(application_id)\n return\n # 子账单id\n sub_bill_id = payload['bill_sub_id']\n # java中还款时的唯一标志\n partner_bill_id = payload['partner_bill_id']\n\n application = (Application\n .filter(Application.external_id == application_id).first())\n\n if application.type == ApplicationType.CASH_LOAN_STAGING.value:\n # 根据子账单获取催收单的id\n application = (Application.select(Application)\n .join(OverdueBill,JOIN_LEFT_OUTER,\n on = Application.id == OverdueBill.collection_id)\n .where(OverdueBill.external_id == application_id,\n OverdueBill.sub_bill_id == sub_bill_id)\n .first())\n if not application:\n logging.info('application %s paid, not found application',\n application_id)\n return\n\n try:\n bill = BillService().sub_bill_list(bill_sub_ids = [sub_bill_id])\n bill = bill[0]\n except Exception:\n logging.error('application %s overdue, get bill info failed: '\n 'Request To Repayment Error', application_id)\n raise RuntimeError('Get repayment bills failed. 
{}'\n .format(str(application.id)))\n\n if bill.get('overdue_days') > 0 and bill.get('status') != 2:\n\n Application.update(\n status=ApplicationStatus.UNCLAIMED.value\n ).where(Application.id == application.id).execute()\n # 获取子账单\n overdue_bill = (OverdueBill\n .filter(OverdueBill.external_id == application_id,\n OverdueBill.sub_bill_id == sub_bill_id)\n .first())\n if not overdue_bill:\n logging.info(\"not find overdue_bill,sub_bill_id:%s,appid:%s\" %\n (sub_bill_id, application_id))\n return\n if overdue_bill.status == ApplicationStatus.REPAID.value:\n overdue_bill.status = ApplicationStatus.UNCLAIMED.value\n overdue_bill.finished_at = None\n overdue_bill.save()\n # 还款记录要置为无效\n RepaymentLog.update(\n no_active = 1\n ).where(RepaymentLog.partner_bill_id == partner_bill_id,\n RepaymentLog.overdue_bill_id == overdue_bill.id).execute()\n\n# 老数据消息处理\ndef bill_revoke_old(external_id):\n application = (Application.select()\n .where(Application.id == external_id)\n .first())\n if not application:\n logging.info(\"not get application\")\n return\n try:\n bill = BillService().bill_dict(\n application_id=external_id)\n except Exception:\n logging.error('application %s overdue, get bill info failed: '\n 'Request To Repayment Error', external_id)\n return\n if bill.get('overdue_days') >0 and bill.get(\"status\") != 2:\n q = (Application\n .update(status=ApplicationStatus.UNCLAIMED.value,\n repay_at=bill.get('repay_at'))\n .where(Application.id == external_id).execute())\n p = (OverdueBill.update(status=ApplicationStatus.UNCLAIMED.value)\n .where(OverdueBill.collection_id == external_id).execute())\n return\n\n\ndef check_key_not_none(payload, keys):\n for key in keys:\n if payload.get(key) is None:\n logging.error('Missing args {}'.format(str(key)))\n return False\n return True\n\n\n# 还款\n@action(MessageAction.BILL_PAID)\ndef bill_paid(payload, msg_id):\n # Don't use validator, it will throw exception\n validate = check_key_not_none(payload,\n ['external_id', 'late_fee_part',\n 'principal_part', 'paid_at','bill_sub_id',\n 'partner_bill_id'])\n if not validate:\n logging.error('payload key not fully pass in.')\n return\n\n external_id = payload['external_id']\n\n late_fee_part = Decimal(payload['late_fee_part'])\n principal_part = Decimal(payload['principal_part'])\n paid_at = payload['paid_at']\n partner_bill_id = payload['partner_bill_id']\n\n logging.debug('application %s paid principal part %s, paid late fee '\n 'part %s', external_id, principal_part, late_fee_part)\n\n application = (Application\n .filter(Application.external_id == external_id)\n .order_by(-Application.created_at)\n .first())\n if not application:\n logging.info('application %s paid, not found application',external_id)\n return\n\n # 获取期数\n sub_bill_id = payload['bill_sub_id']\n overdue_bill = (OverdueBillR.select()\n .where(OverdueBillR.collection_id == application.id,\n OverdueBillR.sub_bill_id == sub_bill_id)\n .first())\n if (application.type == ApplicationType.CASH_LOAN_STAGING.value\n and not overdue_bill):\n logging.info(\"bill sub not in bomber %s\",sub_bill_id)\n return\n with db.atomic():\n repay_at = str_no_utc_datetime(payload['latest_repay_at'])\n\n Application.update(\n repay_at=repay_at\n ).where(Application.id == application.id).execute()\n\n # 预测呼出系统上线后 全部认为 is_bombed = True\n\n RepaymentLog.create(\n application=application.id,\n is_bombed=True,\n current_bomber=application.latest_bomber_id,\n cycle=application.cycle,\n principal_part=principal_part,\n late_fee_part=late_fee_part,\n repay_at=paid_at,\n 
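# --- illustrative sketch, not part of the original worker module ---
# bill_paid deliberately avoids a validator and instead calls
# check_key_not_none() (defined above) with the list of required payload
# keys, logging the first missing one and returning early instead of
# raising.  A tiny usage example of that helper; the payload values are
# made up:

if __name__ == '__main__':
    sample = {'external_id': '123', 'late_fee_part': '0',
              'principal_part': '5000', 'paid_at': '2020-01-01 00:00:00',
              'bill_sub_id': '9', 'partner_bill_id': None}
    ok = check_key_not_none(sample,
                            ['external_id', 'late_fee_part', 'principal_part',
                             'paid_at', 'bill_sub_id', 'partner_bill_id'])
    print(ok)   # False: partner_bill_id is None, so the handler would bail out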
ptp_bomber=application.ptp_bomber,\n latest_call=application.latest_call,\n periods=overdue_bill.periods if overdue_bill else None,\n overdue_bill_id=overdue_bill.id if overdue_bill else None,\n partner_bill_id=partner_bill_id\n )\n\n # 智能催收 —— 催收号码进行排序\n phone_status = PhoneStatus.CONNECTED.value\n real_relationship = RealRelationship.user_values()\n commit = CallActionCommit.NO.value\n number = (CallActions.select(CallActions.number)\n .where(CallActions.phone_status == phone_status,\n CallActions.real_relationship << real_relationship,\n CallActions.commit == commit,\n CallActions.application == application.id)\n .order_by(-CallActions.created_at)\n .first())\n if number:\n (Contact.update(call_priority=PriorityStatus.REPAY.value)\n .where(Contact.user_id == application.user_id,\n Contact.call_priority == PriorityStatus.LAST.value)\n ).execute()\n\n (Contact.update(call_priority=PriorityStatus.LAST.value)\n .where(Contact.user_id == application.user_id,\n Contact.number == number.number)\n ).execute()\n\n if not application.latest_bomber_id:\n return\n\n Inbox.create(\n title='application %s,sub_bill_id %s repaid' % (\n application.external_id, sub_bill_id),\n content='application %s,sub_bill_id %s repaid' % (\n application.external_id, sub_bill_id),\n receiver=(application.latest_bomber_id or\n application.last_bomber_id),\n category=InboxCategory.REPAID.value,\n )\n\n\n@action(MessageAction.BILL_RELIEF)\ndef bill_relief(payload, msg_id):\n \"\"\"已废弃\"\"\"\n bill = payload['head_bill']\n\n repay_at = str_no_utc_datetime(bill['latest_repay_at'])\n updated_row = Application.update(\n repay_at=repay_at,\n ).where(Application.id == bill['external_id']).execute()\n\n logging.info('application %s bill relief done', bill['external_id'])\n return updated_row\n\n\n# 还款完成,\n@action(MessageAction.BILL_CLEARED)\n@action(MessageAction.BILL_CLEARED_BEFORE_CONFIRM)\ndef bill_cleared(payload, msg_id):\n \"\"\"\n BILL_CLEARED_BEFORE_CONFIRM仅在bomber系统中使用,MST清除账单时先修改其状态\n 为还款完成,让其不被催收\n \"\"\"\n external_id = payload.get('external_id')\n sub_bill_id = payload.get('bill_sub_id')\n if not external_id:\n logging.warning('payload has no external_id. 
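# --- illustrative sketch, not part of the original worker module ---
# bill_paid above re-ranks the payer's numbers after a repayment: the
# most recent connected, non-committed call becomes PriorityStatus.LAST
# and whichever number previously held LAST is demoted to REPAY.  The
# same two-step swap applied to an in-memory contact list; the string
# values stand in for the PriorityStatus enum members.

REPAY, LAST = 'REPAY', 'LAST'


def promote_connected_number(contacts, connected_number):
    for c in contacts:
        if c['priority'] == LAST:
            c['priority'] = REPAY          # step 1: demote the previous LAST number
    for c in contacts:
        if c['number'] == connected_number:
            c['priority'] = LAST           # step 2: promote the number that answered
    return contacts


if __name__ == '__main__':
    book = [{'number': '62811', 'priority': LAST},
            {'number': '62822', 'priority': REPAY}]
    print(promote_connected_number(book, '62822'))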
{}'.format(str(payload)))\n return\n\n # 如果还清,清除不在拨打ivr\n AutoIVR.update(\n status=AutoIVRStatus.REPAID.value\n ).where(AutoIVR.application_id == external_id).execute()\n\n try:\n bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])\n bill = bill[0]\n except Exception:\n logging.error('get bill info failed: '\n 'Request To Repayment Error', external_id)\n return\n application = Application.filter(\n Application.external_id == external_id,\n Application.status << [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.BAD_DEBT.value,\n ApplicationStatus.AB_TEST.value]\n ).first()\n if not application:\n logging.info('application %s repay clear, not found bomber record',\n external_id)\n return\n\n with db.atomic():\n # 修改本次还清的自账单状态\n sub_bill_update = (OverdueBill.update(\n status = ApplicationStatus.REPAID.value,\n finished_at = datetime.now())\n .where(OverdueBill.collection_id == application.id,\n OverdueBill.sub_bill_id == sub_bill_id)\n .execute())\n # 如果是分期的件,判断是否完成还款\n overdue_bill = (OverdueBill.select()\n .where(OverdueBill.collection_id == application.id,\n OverdueBill.status != 2,\n OverdueBill.sub_bill_id != sub_bill_id))\n if overdue_bill.exists():\n if application.latest_bomber_id:\n Inbox.create(\n title='application %s sub_bill_id %s cleared' % (\n application.external_id, sub_bill_id),\n content='application %s sub_bill_id %s cleared' % (\n application.external_id, sub_bill_id),\n receiver=application.latest_bomber_id,\n category=InboxCategory.CLEARED.value,\n )\n return\n\n # 还款完成同步更新到外包\n partner = DispatchApp.filter(DispatchApp.application == application.id)\n if partner.exists():\n DispatchApp.update(\n status=DisAppStatus.ABNORMAL.value\n ).where(DispatchApp.application == application.id).execute()\n\n # 更新自动拨号系统队列 application 状态\n AutoCallList.update(\n status=AutoListStatus.REMOVED.value,\n description='bill clear'\n ).where(AutoCallList.application == application.id).execute()\n\n application.status = ApplicationStatus.REPAID.value\n application.finished_at = datetime.now()\n application.paid_at = datetime.now()\n # 如果逾期天数为0说明没有逾期,该件不应该进bomber\n if int(bill.get(\"overdue_days\")) <= 0:\n application.no_active = 1\n (RepaymentLog.update(no_active=1)\n .where(RepaymentLog.application == application.id)\n .execute())\n application.save()\n\n bomber_id = application.latest_bomber_id\n # c1b月底清件之后会入案,支付完成时要出案,2是默认的bomber_id\n if (application.cycle in (Cycle.C1A.value,Cycle.C1B.value) and\n not bomber_id):\n bomber_id = application.cycle\n if not bomber_id:\n return\n\n (DispatchAppHistory.update(\n out_at=datetime.now()\n ).where(\n DispatchAppHistory.application == application.id,\n DispatchAppHistory.bomber_id == bomber_id)).execute()\n\n if not application.latest_bomber_id:\n return\n\n item = (OldLoanApplication\n .get_or_none(OldLoanApplication.status ==\n OldLoanStatus.PROCESSING.value,\n OldLoanApplication.application_id ==\n application.id))\n if item:\n end_old_application(item, paid=True)\n out_record(src_bomber_id=bomber_id,\n application_ids=[item.application_id])\n\n Inbox.create(\n title='application %s cleared' % application.external_id,\n content='application %s cleared' % application.external_id,\n receiver=application.latest_bomber_id,\n category=InboxCategory.CLEARED.value,\n )\n\n\n# 同步bill2\n@action(MessageAction.OVERDUE_BILL_SYNC)\ndef overdue_bill_sync(payload, msg_id):\n \"\"\"已废弃\"\"\"\n bill2_list = payload\n updated_count = 0\n with db.atomic():\n for bill in bill2_list:\n\n principal = 
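# --- illustrative sketch, not part of the original worker module ---
# For staged loans, bill_cleared above marks the paid sub-bill as REPAID
# first and only closes the whole application when no other sub-bill of
# the same collection is still open.  A minimal pure-Python version of
# that decision; 'repaid' is a placeholder for ApplicationStatus.REPAID.

def application_fully_cleared(sub_bills, paid_sub_bill_id):
    for sb in sub_bills:
        if sb['sub_bill_id'] == paid_sub_bill_id:
            continue                      # the instalment we just settled
        if sb['status'] != 'repaid':
            return False                  # another instalment is still open
    return True


if __name__ == '__main__':
    bills = [{'sub_bill_id': 1, 'status': 'repaid'},
             {'sub_bill_id': 2, 'status': 'open'}]
    print(application_fully_cleared(bills, paid_sub_bill_id=1))   # False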
Decimal(bill['principal'])\n repay_at = str_no_utc_datetime(bill['latest_repay_at'])\n\n updated_count += Application.update(\n amount=principal,\n repay_at=repay_at,\n ).where(Application.id == bill['external_id']).execute()\n\n logging.info('overdue sync done, updated count: %s', updated_count)\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)\ndef calc_overdue_days_over(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n #更新逾期天数大于95天的件\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query = (Application\n .update(overdue_days=overdue_days)\n .where(Application.status <<\n [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.AB_TEST.value],\n Application.overdue_days > 95,\n Application.type == ApplicationType.CASH_LOAN.value))\n updated_rows_count = query.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count)\n\n try:\n calc_overdue_days_over_instalment()\n except Exception as e:\n logging.error(\"calc_overdue_days_over_instalment_error: %s\"%str(e))\n\n # 计算overdue_days后自动触发升级\n apps = Application.filter(\n Application.status << [ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value],\n Application.overdue_days > 95,\n Application.promised_date.is_null(True) |\n (fn.DATE(Application.promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(\n MessageAction.BOMBER_AUTOMATIC_ESCALATION,\n {'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n\n# 计算逾期天数超过95天的件的逾期天数\ndef calc_overdue_days_over_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.AB_TEST.value]\n for status in sub_bill_status_list:\n # 更新逾期天数\n query = (OverdueBill.update(overdue_days=overdue_days)\n .where(OverdueBill.status == status,\n OverdueBill.overdue_days > 95))\n updated_rows_count = query.execute()\n logging.info(\"calc_overdue_days_over_instalment done,count:%s,status:%s\" %\n (updated_rows_count, status))\n\n # 获取所有的子账单信息\n overdue_bills = (OverdueBill\n .select(OverdueBill.collection_id,\n OverdueBill.overdue_days)\n .join(Application, JOIN_LEFT_OUTER,\n on=OverdueBill.collection_id == Application.id)\n .where(Application.status == status,\n (Application.type ==\n ApplicationType.CASH_LOAN_STAGING.value)))\n # 获取每个分期催收单要更新的逾期天数\n app_update = {}\n for ob in overdue_bills:\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n # 更新催收单的逾期天数\n for aid, a_days in app_update.items():\n q = (Application.update(overdue_days=a_days)\n .where(Application.id == aid)\n .execute())\n logging.info(\"update instalment application done\")\n\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)\ndef calc_overdue_days(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, 
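# --- illustrative sketch, not part of the original worker module ---
# calc_overdue_days_over and calc_overdue_days_over_instalment above both
# push the same expression into SQL: GREATEST(DATEDIFF(NOW(),
# origin_due_at), 0), i.e. days past due clamped at zero so future due
# dates never produce negative values.  The equivalent in plain Python:

from datetime import date


def overdue_days(origin_due_at, today=None):
    today = today or date.today()
    return max((today - origin_due_at).days, 0)


if __name__ == '__main__':
    print(overdue_days(date(2020, 1, 1), today=date(2020, 4, 10)))   # 100
    print(overdue_days(date(2099, 1, 1), today=date(2020, 4, 10)))   # 0, clamped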
SQL('0'))\n query_unclaimed = (Application\n .update(overdue_days=overdue_days)\n .where(Application.status ==\n ApplicationStatus.UNCLAIMED.value,\n Application.overdue_days <= 95,\n (Application.type ==\n ApplicationType.CASH_LOAN.value)))\n updated_rows_count_unclaimed = query_unclaimed.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_unclaimed)\n\n query_processing = (Application\n .update(overdue_days=overdue_days)\n .where(Application.status ==\n ApplicationStatus.PROCESSING.value,\n Application.overdue_days <= 95,\n (Application.type ==\n ApplicationType.CASH_LOAN.value)))\n updated_rows_count_processing = query_processing.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_processing)\n\n query_test = (Application\n .update(overdue_days=overdue_days)\n .where(Application.status ==\n ApplicationStatus.AB_TEST.value,\n Application.overdue_days <= 95,\n (Application.type ==\n ApplicationType.CASH_LOAN.value)))\n updated_rows_count_test = query_test.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_test)\n\n # 分期账单计算逾期天数\n calc_overdue_days_instalment()\n\n # 计算overdue_days后自动触发升级\n apps = Application.select(Application.id).where(\n Application.status << [ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value],\n Application.overdue_days <= 95,\n Application.promised_date.is_null(True) |\n (fn.DATE(Application.promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(\n MessageAction.BOMBER_AUTOMATIC_ESCALATION,\n {'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n\n # overdue_days 计算完成后,修改C1A_entry(预期天数为4的设为C1A)\n Application.update(\n C1A_entry=datetime.now()\n ).where(\n Application.status << [ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value],\n Application.overdue_days == 4\n ).execute()\n\n# 分期的件计算逾期天数\ndef calc_overdue_days_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.AB_TEST.value]\n # 获取当月第一天的时间\n today_now_time = datetime.now()\n month_first_day = today_now_time.replace(day=1,\n hour=1,\n minute=30,\n second=0,\n microsecond=0)\n for status in sub_bill_status_list:\n # 更新逾期天数\n query = (OverdueBill.update(overdue_days = overdue_days)\n .where(OverdueBill.status == status,\n OverdueBill.overdue_days <= 95))\n updated_rows_count = query.execute()\n logging.info(\"calc_overdue_days_instalment done,count:%s,status:%s\" %\n (updated_rows_count, status))\n\n # 获取所有的子账单信息\n overdue_bills = (OverdueBill\n .select(OverdueBill.status,\n OverdueBill.created_at,\n OverdueBill.collection_id,\n OverdueBill.overdue_days)\n .join(Application, JOIN_LEFT_OUTER,\n on=OverdueBill.collection_id == Application.id)\n .where(Application.status == status,\n (Application.type ==\n ApplicationType.CASH_LOAN_STAGING.value)))\n # 获取每个分期催收单要更新的逾期天数\n app_update = {}\n for ob in overdue_bills:\n # 排除到分期这个月之前还款完成的那一期\n if (ob.status == ApplicationStatus.REPAID.value and\n ob.created_at < month_first_day):\n continue\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = 
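# --- illustrative sketch, not part of the original worker module ---
# After recalculating overdue_days, calc_overdue_days above fans the
# affected application ids out to the escalation queue in slices of 100
# (range(0, len(ids), 100)).  The same batching idiom in isolation:

def batched(ids, size=100):
    for start in range(0, len(ids), size):
        yield ids[start:start + size]


if __name__ == '__main__':
    for chunk in batched(list(range(250)), size=100):
        # in the worker each chunk becomes the 'application_list' payload
        # of one BOMBER_AUTOMATIC_ESCALATION message
        print(len(chunk))   # 100, 100, 50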
max(app_update[ob.collection_id],ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n # 更新催收单的逾期天数\n for aid,a_days in app_update.items():\n q = (Application.update(overdue_days = a_days)\n .where(Application.id == aid)\n .execute())\n logging.info(\"update instalment application done\")\n\n\n@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)\ndef automatic_escalation(payload, msg_id):\n app_ids = payload.get('application_list', [])\n if not app_ids:\n return\n # 过滤掉已完成的订单\n apps = (Application.select()\n .where(Application.id.in_(app_ids),\n Application.status != ApplicationStatus.REPAID.value))\n\n for a in apps:\n new_cycle = application_entry_different_calculations(a)\n if a.overdue_days < 90:\n logging.info(\n \"automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}\".format(\n a.id, new_cycle, a.cycle, a.overdue_days))\n if new_cycle > a.cycle:\n with db.atomic():\n if (a.latest_bomber_id or\n a.cycle in (Cycle.C1A.value, Cycle.C1B.value)):\n bomber_id = (a.latest_bomber_id\n if a.latest_bomber_id else a.cycle)\n (DispatchAppHistory.update(\n out_at=datetime.now(),\n out_overdue_days=a.overdue_days,\n ).where(\n DispatchAppHistory.application == a.id,\n DispatchAppHistory.bomber_id == bomber_id\n )).execute()\n\n Escalation.create(\n application=a.id,\n type=EscalationType.AUTOMATIC.value,\n status=ApprovalStatus.APPROVED.value,\n current_cycle=a.cycle,\n escalate_to=new_cycle,\n current_bomber_id=a.latest_bomber,\n )\n\n # 升级的时候如果是外包的件更新dispatch_app中的状态\n dis_app_update = (DispatchApp\n .update(status = DisAppStatus.ABNORMAL.value)\n .where(DispatchApp.application == a.id))\n dis_app_update.execute()\n a.cycle = new_cycle\n a.last_bomber = a.latest_bomber\n a.status = ApplicationStatus.UNCLAIMED.value\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n # 升级之后 拨打次数清零\n a.called_times = 0\n if new_cycle == Cycle.C1B.value:\n a.C1B_entry = datetime.now()\n elif new_cycle == Cycle.C2.value:\n a.C2_entry = datetime.now()\n elif new_cycle == Cycle.C3.value:\n a.C3_entry = datetime.now()\n a.save()\n logging.info('automatic escalation done')\n\n# 把部分件的进入C1B的时间改为10天\ndef application_entry_different_calculations(app):\n conf = {\n 1: [1, 10],\n 2: [11, 30],\n 3: [31, 60],\n 4: [61, 90],\n 5: [91, 999999],\n }\n for new_cycle,scopes in conf.items():\n if scopes[0] <= app.overdue_days <= scopes[1]:\n return new_cycle\n return app.cycle\n\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY)\ndef cron_summary(payload, msg_id):\n \"\"\"已废弃\"\"\"\n employees = Bomber.select(Bomber, Role).join(Role)\n summary = {\n i.id: {\n 'cycle': i.role.cycle,\n 'claimed': 0,\n 'completed': 0,\n 'cleared': 0,\n 'escalated': 0,\n 'transferred': 0,\n 'promised': 0,\n 'amount_recovered': Decimal(0),\n 'calls_made': 0,\n 'calls_connected': 0,\n 'sms_sent': 0,\n }\n for i in employees\n }\n # 每天 2点 15分 计算 昨天的情况\n now_date = date.today()\n cal_date = now_date - timedelta(days=1)\n # 当日下了多少ptp\n claimed = (Application\n .select(Application.latest_bomber,\n fn.COUNT(Application.id).alias('claimed'))\n .where(fn.DATE(Application.claimed_at) == cal_date,\n Application.status <<\n [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.REPAID.value],\n Application.latest_bomber.is_null(False))\n .group_by(Application.latest_bomber))\n\n # 当日ptp还款件数目\n cleared = (Application\n .select(Application.latest_bomber,\n fn.COUNT(Application.id).alias('cleared'))\n .where(fn.DATE(Application.finished_at) == cal_date,\n Application.status == ApplicationStatus.REPAID.value,\n 
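# --- illustrative sketch, not part of the original worker module ---
# application_entry_different_calculations above maps overdue days to a
# target cycle with inclusive ranges: 1-10 -> 1, 11-30 -> 2, 31-60 -> 3,
# 61-90 -> 4, 91+ -> 5; outside every range the current cycle is kept.
# Stand-alone version of the same lookup:

DPD_TO_CYCLE = {1: (1, 10), 2: (11, 30), 3: (31, 60), 4: (61, 90), 5: (91, 999999)}


def cycle_for_overdue_days(overdue_days, current_cycle):
    for cycle, (low, high) in DPD_TO_CYCLE.items():
        if low <= overdue_days <= high:
            return cycle
    return current_cycle


if __name__ == '__main__':
    print(cycle_for_overdue_days(21, current_cycle=1))   # 2 -> escalate to C1B
    print(cycle_for_overdue_days(0, current_cycle=1))    # 1 -> unchanged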
Application.latest_bomber.is_null(False))\n .group_by(Application.latest_bomber))\n\n # 当日有多少个ptp被维护\n completed = (Application\n .select(Application.latest_bomber,\n fn.COUNT(Application.id).alias('completed'))\n .where(Application.latest_bombing_time.is_null(False),\n fn.DATE(Application.latest_bombing_time) == cal_date,\n Application.latest_bomber.is_null(False))\n .group_by(Application.latest_bomber))\n\n # 手工维护的件多少个件进入下一个cycle\n escalated = (Escalation\n .select(Escalation.current_bomber,\n fn.COUNT(Escalation.id).alias('escalated'))\n .where(fn.DATE(Escalation.created_at) == cal_date,\n Escalation.type == EscalationType.AUTOMATIC.value,\n Escalation.current_bomber.is_null(False),\n Escalation.status == ApprovalStatus.APPROVED.value)\n .group_by(Escalation.current_bomber))\n\n # 当日从某人手上移出多少个件\n transferred = (Transfer\n .select(Transfer.operator,\n fn.COUNT(Transfer.id).alias('transferred'))\n .where(fn.DATE(Transfer.reviewed_at) == cal_date,\n Transfer.status == ApprovalStatus.APPROVED.value)\n .group_by(Transfer.operator))\n\n # 当天的下p件有多少有进展\n promised = (\n BombingHistory\n .select(BombingHistory.bomber,\n fn.COUNT(BombingHistory.id).alias('promised'))\n .where(fn.DATE(BombingHistory.created_at) == cal_date,\n BombingHistory.result == BombingResult.HAS_PROGRESS.value)\n .group_by(BombingHistory.bomber)\n )\n\n # 当天催回的金额\n amount_recovered = (RepaymentLog\n .select(RepaymentLog.current_bomber,\n fn.SUM(RepaymentLog.principal_part)\n .alias('principal_part'),\n fn.SUM(RepaymentLog.late_fee_part)\n .alias('late_fee_part'))\n .where(fn.DATE(RepaymentLog.repay_at) == cal_date,\n RepaymentLog.is_bombed == True,\n RepaymentLog.current_bomber.is_null(False))\n .group_by(RepaymentLog.current_bomber))\n\n # calllog表已废弃\n calls_made = (CallLog\n .select(CallLog.user_id,\n fn.COUNT(CallLog.record_id).alias('calls_made'))\n .where(fn.DATE(CallLog.time_start) == cal_date,\n CallLog.system_type == '1')\n .group_by(CallLog.user_id))\n\n # calllog表已废弃\n calls_connected = (CallLog\n .select(CallLog.user_id,\n fn.COUNT(CallLog.record_id)\n .alias('calls_connected'))\n .where(fn.DATE(CallLog.time_start) == cal_date,\n CallLog.duration > 10,\n CallLog.system_type == '1').\n group_by(CallLog.user_id))\n\n # 当天发送的所有短信\n sms_sent = (ConnectHistory\n .select(ConnectHistory.operator,\n fn.COUNT(ConnectHistory.id).alias('sms_sent'))\n .where(ConnectHistory.type.in_(ConnectType.sms()),\n ConnectHistory.created_at >= cal_date,\n ConnectHistory.created_at < now_date\n )\n .group_by(ConnectHistory.operator))\n\n for i in claimed:\n summary[i.latest_bomber_id]['claimed'] += i.claimed\n\n for i in completed:\n summary[i.latest_bomber_id]['completed'] += i.completed\n\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n\n for i in escalated:\n summary[i.current_bomber_id]['escalated'] += i.escalated\n\n for i in transferred:\n summary[i.operator_id]['transferred'] += i.transferred\n\n for i in promised:\n summary[i.bomber_id]['promised'] += i.promised\n\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n\n for i in calls_made:\n summary[int(i.user_id)]['calls_made'] += i.calls_made\n\n for i in calls_connected:\n summary[int(i.user_id)]['calls_connected'] += i.calls_connected\n\n for i in sms_sent:\n summary[i.operator_id]['sms_sent'] += i.sms_sent\n\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({\n 'bomber': bomber_id,\n 'cycle': data['cycle'],\n 
'claimed': data['claimed'],\n 'completed': data['completed'],\n 'cleared': data['cleared'],\n 'escalated': data['escalated'],\n 'transferred': data['transferred'],\n 'promised': data['promised'],\n 'amount_recovered': data['amount_recovered'],\n 'calls_made': data['calls_made'],\n 'calls_connected': data['calls_connected'],\n 'sms_sent': data['sms_sent'],\n 'date': cal_date,\n })\n\n if insert_args:\n Summary.insert_many(insert_args).execute()\n\n cycle_args = []\n # cal new in\n # 按照 cycle 统计\n escalated_in = (Escalation\n .select(Escalation.escalate_to,\n fn.COUNT(Escalation.id).alias('escalated_in'))\n .where(Escalation.status == ApprovalStatus.APPROVED.value,\n fn.DATE(Escalation.created_at) == cal_date)\n .group_by(Escalation.escalate_to))\n\n for i in escalated_in:\n cycle_args.append({\n 'cycle': i.escalate_to,\n 'escalated_in': i.escalated_in,\n 'date': cal_date,\n })\n\n amount_recovered_total = (\n RepaymentLog\n .select(RepaymentLog.cycle,\n fn.SUM(RepaymentLog.principal_part).alias('principal_part'),\n fn.SUM(RepaymentLog.late_fee_part).alias('late_fee_part'))\n .where(fn.DATE(RepaymentLog.repay_at) == cal_date)\n .group_by(RepaymentLog.cycle)\n )\n\n for i in amount_recovered_total:\n amount_recovered_total = i.principal_part + i.late_fee_part\n cycle_args.append({\n 'cycle': i.cycle,\n 'amount_recovered_total': amount_recovered_total,\n 'date': cal_date,\n })\n if cycle_args:\n Summary.insert_many(cycle_args).execute()\n\n logging.info('cal summary done')\n\n # 报表计算结束后 再更新逾期天数 触发自动升级\n send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY2)\ndef cron_summary2(payload, msg_id):\n \"\"\"已废弃,定时任务还在执行,具体情况待确定\"\"\"\n cal_date = date.today() - timedelta(days=1)\n employees = Bomber.select(Bomber, Role).join(Role)\n auto_call_actions = (\n AutoCallActions\n .select(\n AutoCallActions.bomber,\n AutoCallActions.result,\n fn.COUNT(AutoCallActions.id).alias('count')\n )\n .where(fn.DATE(AutoCallActions.created_at) == cal_date)\n )\n\n amount_recovered = (RepaymentLog\n .select(RepaymentLog.current_bomber,\n fn.SUM(RepaymentLog.principal_part)\n .alias('principal_part'),\n fn.SUM(RepaymentLog.late_fee_part)\n .alias('late_fee_part'))\n .where(fn.DATE(RepaymentLog.repay_at) == cal_date,\n RepaymentLog.current_bomber.is_null(False),\n RepaymentLog.is_bombed == True))\n\n cleared = (Application\n .select(Application.latest_bomber,\n fn.COUNT(Application.id).alias('cleared'))\n .where(fn.DATE(Application.finished_at) == cal_date,\n Application.status == ApplicationStatus.REPAID.value,\n Application.latest_bomber.is_null(False)))\n\n auto_call_actions = auto_call_actions.group_by(\n AutoCallActions.bomber, AutoCallActions.result\n )\n amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)\n cleared = cleared.group_by(Application.latest_bomber)\n\n summary = {\n e.id: {\n 'cycle': e.role.cycle,\n 'answered_calls': 0,\n 'ptp': 0,\n 'follow_up': 0,\n 'not_useful': 0,\n 'cleared': 0,\n 'amount_recovered': 0,\n }\n for e in employees\n }\n for a in auto_call_actions:\n summary[a.bomber_id]['answered_calls'] += a.count\n if a.result == AutoCallResult.PTP.value:\n summary[a.bomber_id]['ptp'] += a.count\n if a.result == AutoCallResult.FOLLOW_UP.value:\n summary[a.bomber_id]['follow_up'] += a.count\n if a.result == AutoCallResult.NOT_USEFUL.value:\n summary[a.bomber_id]['not_useful'] += a.count\n\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n 
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({\n 'bomber': bomber_id,\n 'cycle': data['cycle'],\n 'answered_calls': data['answered_calls'],\n 'ptp': data['ptp'],\n 'follow_up': data['follow_up'],\n 'not_useful': data['not_useful'],\n 'cleared': data['cleared'],\n 'amount_recovered': str(data['amount_recovered']),\n 'date': cal_date,\n })\n\n if insert_args:\n Summary2.insert_many(insert_args).execute()\n\n\n@action(MessageAction.BOMBER_SYNC_CONTACTS)\ndef sync_suggested_contacts(payload, msg_id):\n \"\"\" suggested contacts sync \"\"\"\n\n applications = (Application\n .select(Application.id, Application.user_id)\n .where(Application.status <<\n [ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value]))\n\n logging.debug('start sync contact')\n for a in applications:\n sync_contacts(a)\n logging.info('contact sync finished')\n\n\ndef sync_contacts(application):\n logging.info('application %s start sync contact', application.id)\n\n # 添加联系人信息\n contacts = Contact.filter(Contact.user_id == application.user_id)\n existing_numbers = {contact.number for contact in contacts}\n\n # sms contacts\n insert_contacts = []\n sms_contacts = GoldenEye().get(\n '/applications/%s/sms-contacts' % application.external_id\n )\n if not sms_contacts.ok:\n sms_contacts = []\n logging.info('get user %s sms contacts failed', application.external_id)\n else:\n sms_contacts = sms_contacts.json()['data']\n\n for i in sms_contacts:\n if i['number'] in existing_numbers:\n continue\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': i['name'],\n 'number': i['number'],\n 'relationship': Relationship.SUGGESTED.value,\n 'source': 'sms contacts',\n 'real_relationship': Relationship.SUGGESTED.value\n })\n existing_numbers.add(i['number'])\n\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n\n # call frequency\n insert_contacts = []\n cf = GoldenEye().get(\n '/applications/%s/call/frequency' % application.external_id\n )\n if not cf.ok:\n call_frequency = []\n logging.error('get application %s call frequency error',\n application.external_id)\n else:\n call_frequency = cf.json()['data']\n\n with db.atomic():\n for i in call_frequency:\n if i['number'] in existing_numbers:\n (Contact\n .update(total_count=i['total_count'],\n total_duration=i['total_duration'])\n .where(Contact.number == i['number'],\n Contact.user_id == application.user_id))\n continue\n\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': i['name'],\n 'number': i['number'],\n 'relationship': Relationship.SUGGESTED.value,\n 'total_count': i['total_count'],\n 'total_duration': i['total_duration'],\n 'source': 'call frequency',\n 'real_relationship': Relationship.SUGGESTED.value\n })\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n\n\n@action(MessageAction.BOMBER_AUTO_SMS)\n@deprecated(version='1.0', reason='This function will be removed soon')\ndef bomber_auto_sms(payload, msg_id):\n day_diff = int(payload['day_diff'])\n custom_type = payload.get('custom_type')\n msg_type = payload['msg_type']\n logging.info('auto sms %s sending', msg_type)\n\n applications = (\n Application\n .select()\n .where(Application.overdue_days == day_diff,\n Application.status << [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value],\n Application.promised_date.is_null(True) |\n 
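# --- illustrative sketch, not part of the original worker module ---
# In the call-frequency branch of sync_contacts above, numbers the user
# already has get their total_count/total_duration refreshed while new
# numbers are collected for one bulk insert.  Note that the original
# builds the Contact.update(...) query but never appears to call
# .execute() on it, so that refresh is likely a no-op as written.  A
# peewee-style sketch of the intended pattern with the execute call
# included; Contact and the field names come from the code above, the
# rest is schematic.

def upsert_call_frequency(Contact, user_id, rows, existing_numbers):
    fresh = []
    for row in rows:
        if row['number'] in existing_numbers:
            (Contact
             .update(total_count=row['total_count'],
                     total_duration=row['total_duration'])
             .where(Contact.number == row['number'],
                    Contact.user_id == user_id)
             .execute())          # <- the call the original seems to omit
            continue
        fresh.append({'user_id': user_id, 'number': row['number'],
                      'name': row['name'],
                      'total_count': row['total_count'],
                      'total_duration': row['total_duration']})
    if fresh:
        Contact.insert_many(fresh).execute()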
(fn.DATE(Application.promised_date) < datetime.today().date()))\n )\n\n if custom_type == 'new':\n applications = applications.where(Application.loan_success_times < 3)\n if custom_type == 'old':\n applications = applications.where(Application.loan_success_times >= 3)\n\n templates = (\n Template.select(Template.text, Template.app)\n .where(Template.type == ConnectType.AUTO_SMS.value,\n Template.id << Template.get_auto_sms_tpl(msg_type))\n )\n tpl_text = dict()\n for tpl in templates:\n tpl_text[tpl.app] = tpl.text\n\n data_list = []\n for a in applications:\n tpl_data = {\n 'user_name': a.user_name,\n 'due_days': a.overdue_days,\n 'app_name': a.app,\n 'phone': a.user_mobile_no,\n 'cs_number': cs_number_conf.get(a.app, '02150202889'),\n }\n content = tpl_text[a.app].format(**tpl_data)\n data_list.append({\n 'phone': '62' + a.user_mobile_no,\n 'content': content,\n 'app': a.app,\n })\n\n if not data_list:\n logging.info('auto sms %s do not need sending', msg_type)\n return\n\n send_sms(data_list, msg_type, SmsChannel.NUSA.value)\n\n\n@action(MessageAction.BOMBER_AUTO_MESSAGE_DAILY)\ndef bomber_auto_message_daily(payload, msg_id):\n app_dict = dict(zip(AppName.keys(), AppName.values()))\n\n #当天自动外呼成功的电话记录\n auto_call_list = AutoCallActionsR \\\n .select(AutoCallActionsR.application_id) \\\n .where(fn.DATE(AutoCallActionsR.created_at) == fn.CURDATE())\n applications = (\n ApplicationR\n .select()\n .where(ApplicationR.overdue_days < 30,\n ApplicationR.overdue_days > 4,\n ApplicationR.type == ApplicationType.CASH_LOAN.value,\n ApplicationR.status << [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.AB_TEST.value],\n ApplicationR.promised_date.is_null(True) |\n (fn.DATE(ApplicationR.promised_date) < datetime.today().date()),\n ~(ApplicationR.id << auto_call_list))\n )\n stage_list1 = range(*AutoCallMessageCycle.NEW_STAGE1.value['scope'], 3) #5,8,11,14\n stage_list2 = range(*AutoCallMessageCycle.STAGE2.value['scope'], 3) #15,18\n stage_list3 = range(*AutoCallMessageCycle.STAGE3.value['scope'], 3)\n sms_list = defaultdict(list)\n fcm_list = defaultdict(list)\n for a in applications:\n overdue_type = ''\n if a.overdue_days in stage_list1:\n if a.loan_success_times < 3:\n overdue_type = AutoCallMessageCycle.NEW_STAGE1.value['type']\n else:\n overdue_type = AutoCallMessageCycle.OLD_STAGE1.value['type']\n if a.overdue_days in stage_list2:\n overdue_type = AutoCallMessageCycle.STAGE2.value['type']\n if a.overdue_days in stage_list3:\n overdue_type = AutoCallMessageCycle.STAGE3.value['type']\n if overdue_type == '':\n continue\n # format app name\n app_name = app_dict.get(a.app.upper(), AppName.default().value)\n try:\n tpl_id = Template.get_daily_auto_sms_tpl(overdue_type, app_name)\n except KeyError:\n logging.warning('Key error {}, id is {}'.format(\n (overdue_type, app_name), a.id))\n continue\n data_map = {\n 'user_name': a.user_name,\n 'app_name': app_name,\n 'overdue_days': a.overdue_days,\n 'cs_number': cs_number_conf.get(a.app, '')\n }\n sms_list[(overdue_type, tpl_id, a.app)].append({\n 'receiver': '62' + a.user_mobile_no,\n 'data_map': data_map\n })\n fcm_list[(overdue_type, tpl_id, a.app)].append({\n 'receiver': a.user_id,\n 'data_map': data_map\n })\n\n for (msg_type, tpl_id, app_name), data_list in sms_list.items():\n auto_send_sms_and_fcm(data_list, tpl_id, app_name, \"SMS\")\n for (msg_type, tpl_id, app_name), data_list in sms_list.items():\n auto_send_sms_and_fcm(data_list, tpl_id, app_name, 
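# --- illustrative sketch, not part of the original worker module ---
# bomber_auto_message_daily above only messages every third overdue day
# inside each stage: range(*scope, 3) over the stage boundaries, which
# the inline comments say yields days 5, 8, 11, 14 for stage 1 and
# 15, 18 for stage 2.  The bucket lookup in isolation; the scope tuples
# below are example values shaped like AutoCallMessageCycle's 'scope',
# not the production configuration.

STAGES = {
    'stage1': (5, 15),    # -> 5, 8, 11, 14
    'stage2': (15, 20),   # -> 15, 18
    'stage3': (20, 30),   # -> 20, 23, 26, 29
}


def stage_for_day(overdue_days):
    for name, scope in STAGES.items():
        if overdue_days in range(*scope, 3):
            return name
    return None               # not a messaging day


if __name__ == '__main__':
    print([stage_for_day(d) for d in (5, 6, 18, 29, 31)])
    # ['stage1', None, 'stage2', 'stage3', None]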
\"FCM\")\n\n\n#分期逾期短信\n@action(MessageAction.BOMBER_INSTALMENT_AUTO_MESSAGE_DAILY)\ndef bomber_instalment_auto_message_daily(payload, msg_id):\n applications = (ApplicationR.select(ApplicationR.id,\n ApplicationR.app,\n ApplicationR.user_id,\n ApplicationR.user_name,\n ApplicationR.user_mobile_no,\n ApplicationR.loan_success_times,\n OverdueBillR.status,\n OverdueBillR.sub_bill_id,\n OverdueBillR.overdue_days, )\n .join(OverdueBillR, JOIN_LEFT_OUTER,\n on=ApplicationR.id == OverdueBillR.collection_id)\n .where(ApplicationR.type ==\n ApplicationType.CASH_LOAN_STAGING.value,\n ApplicationR.status != ApplicationStatus.REPAID.value,\n ApplicationR.overdue_days < 90,\n ApplicationR.promised_date.is_null(True) |\n (fn.DATE(\n ApplicationR.promised_date) < datetime.today().date()),\n )\n .dicts())\n # 计算真实的逾期天数和欠款情况\n app_overdues = {}\n for app in applications:\n if app[\"status\"] == ApplicationStatus.REPAID.value:\n continue\n if app[\"id\"] in app_overdues:\n overdue_days = app_overdues[app[\"id\"]][\"overdue_days\"]\n app_overdues[app[\"id\"]][\"overdue_days\"] = max(app[\"overdue_days\"],\n overdue_days)\n app_overdues[app[\"id\"]][\"bill_sub_ids\"].append(app[\"sub_bill_id\"])\n else:\n app_overdues[app[\"id\"]] = {\n \"app_name\": app[\"app\"],\n \"user_id\": app[\"user_id\"],\n \"user_name\": app[\"user_name\"],\n \"overdue_days\": app[\"overdue_days\"],\n \"bill_sub_ids\": [app[\"sub_bill_id\"]],\n \"phone\": '62' + app[\"user_mobile_no\"],\n \"loan_success_times\": app[\"loan_success_times\"],\n \"cs_number\": cs_number_conf.get(app[\"app\"], '02150202889')\n }\n # 获取需要发短信的催收单和计算对应的未支付金额\n sms_dict = {}\n sub_bill_ids = []\n send_message = defaultdict(list)\n send_fcm = defaultdict(list)\n for aid, app in app_overdues.items():\n message_id = Template.get_daily_instalment_auto_sms_tpl(\n overdue_days=app[\"overdue_days\"],\n loan_times=app[\"loan_success_times\"]\n )\n if message_id:\n app[\"tpl_id\"] = message_id\n sms_dict[aid] = app\n sub_bill_ids.extend(app[\"bill_sub_ids\"])\n if not sms_dict:\n logging.info(\"no application need send sms\")\n return\n sub_bills = []\n try:\n for index in range(0,len(sub_bill_ids),30):\n sub_bill = BillService().sub_bill_list(\n bill_sub_ids=sub_bill_ids[index:index+30])\n sub_bills += sub_bill\n except Exception as e:\n logging.info(\"send sms get bill error:%s\" % str(e))\n return\n sub_bills_dict = {int(sb[\"id\"]): sb for sb in sub_bills}\n for aid, app in sms_dict.items():\n amount = 0\n for sbid in app[\"bill_sub_ids\"]:\n amount += sub_bills_dict.get(sbid, {}).get(\"unpaid\", 0)\n data_map = {\n \"user_name\": app[\"user_name\"],\n \"app_name\": app[\"app_name\"],\n \"overdue_days\": app[\"overdue_days\"],\n \"cs_number\": app[\"cs_number\"],\n \"amount\": str(amount)\n }\n send_message[(app['tpl_id'], app[\"app_name\"])].append({\n \"receiver\": app[\"phone\"],\n \"data_map\": data_map\n })\n send_fcm[(app['tpl_id'], app[\"app_name\"])].append({\n \"receiver\": app[\"user_id\"],\n \"data_map\": data_map\n })\n for (tpl_id, app_name), data_list in send_message.items():\n auto_send_sms_and_fcm(data_list, tpl_id, app_name, \"SMS\")\n for (msg_type, tpl_id, app_name), data_list in send_fcm.items():\n auto_send_sms_and_fcm(data_list, tpl_id, app_name, \"FCM\")\n\n\n\ndef auto_send_sms_and_fcm(data_list, tpl_id, app_name, message_type):\n if not data_list:\n return\n # 200 条 一次请求\n for idx in range(0, len(data_list), 200):\n request_json = {\n \"app_name\": app_name,\n \"failed_retry\": True,\n \"is_masking\": True,\n \"list\": data_list[idx: 
idx+200],\n \"message_level\": 1,\n \"message_type\": message_type,\n \"sms_type\": 4 if message_type == \"SMS\" else 0,\n \"type_id\": tpl_id\n }\n try:\n result = MessageService().send_batch_template(**request_json)\n if not result.get(\"result\"):\n logging.error()\n except Exception as e:\n logging.error()\n return\n logging.info(\"\")\n\n\ndef get_danamall_msg_service(app_name, message_service):\n if app_name == AppName.DANAMALL.value:\n # token = app.config['service.message.%s.token' % app_name.lower()]\n message_service = Message(version=app_name)\n return message_service\n\n\n#催收员发送短信,提醒承诺时间\n@action(MessageAction.BOMBER_REMIND_PROMISE)\ndef bomber_remind_promise(payload, msg_id):\n day_diff = int(payload['day_diff'])\n msg_type = payload['msg_type']\n logging.info('auto sms %s sending', msg_type)\n\n applications = (\n Application\n .select()\n .where(\n fn.DATEDIFF(fn.NOW(), Application.promised_date) == day_diff,\n Application.status << [\n ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value,\n ]\n )\n )\n\n templates = (\n Template\n .select(Template.text, Template.app)\n .where(Template.type == ConnectType.AUTO_SMS.value,\n Template.id << Template.get_auto_sms_tpl(msg_type))\n )\n\n tpl_text = {tpl.app: tpl.text for tpl in templates}\n message_date_dict = defaultdict(list)\n for a in applications:\n tpl_data = {\n 'user_name': a.user_name,\n 'due_days': a.overdue_days,\n 'app_name': a.app,\n 'phone': a.user_mobile_no,\n 'cs_number': cs_number_conf.get(a.app, '02150202889'),\n 'promised_date': a.promised_date.strftime('%d-%m-%Y'),\n }\n content = tpl_text[a.app].format(**tpl_data)\n message_date_dict[a.app].append(\n {\n \"content\": content,\n \"receiver\": '62' + a.user_mobile_no,\n \"title\": \"\"\n }\n )\n\n for app_name, data_list in message_date_dict.items():\n send_sms(data_list, msg_type, app_name)\n\n\n\n@action(MessageAction.BOMBER_DISCOUNT_APPROVED)\ndef bomber_discount_approved(payload, msg_id):\n app_id = payload['id']\n msg_type = payload['msg_type']\n discount_to = payload['discount_to']\n effective_to = payload['effective_to']\n\n application = Application.filter(Application.id == app_id).first()\n if not application:\n logging.error('discount approved msg send failed '\n 'application %s not found', app_id)\n return\n template = (\n Template\n .select(Template.text, Template.app)\n .where(Template.type == ConnectType.AUTO_SMS.value,\n Template.id << Template.get_auto_sms_tpl(msg_type),\n Template.app == application.app)\n .first()\n )\n if not template:\n logging.error('discount approved msg send failed '\n 'template %s not found', msg_type)\n return\n\n promised_date = None\n if application.promised_date:\n promised_date = application.promised_date.strftime('%d-%m-%Y')\n tpl_data = {\n 'user_name': application.user_name,\n 'due_days': application.overdue_days,\n 'app_name': application.app,\n 'phone': application.user_mobile_no,\n 'cs_number': cs_number_conf.get(application.app, '02150202889'),\n 'promised_date': promised_date,\n 'discount_to': discount_to,\n 'effective_to': effective_to,\n }\n content = template.text.format(**tpl_data)\n\n data_list = [{\n 'receiver': '62' + application.user_mobile_no,\n 'content': content,\n 'title': \"\",\n }]\n send_sms(data_list, msg_type, application.app)\n\n\n# 批量发送自定义短信\ndef send_sms(data_list, msg_type, app_name):\n if not data_list:\n return\n for index in range(0, len(data_list), 200):\n req_data = {\n \"app_name\": app_name,\n \"failed_retry\": True,\n \"is_masking\": True,\n \"list\": 
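# --- illustrative sketch, not part of the original worker module ---
# auto_send_sms_and_fcm above sends at most 200 recipients per request
# and swallows failures, but its logging.error() / logging.info("") calls
# carry no message (a bare logging.error() itself raises TypeError, since
# the msg argument is required).  A sketch of the same batching loop with
# descriptive log lines; send_batch stands in for
# MessageService().send_batch_template.

import logging


def send_in_batches(data_list, tpl_id, app_name, message_type, send_batch, size=200):
    for start in range(0, len(data_list), size):
        payload = {
            'app_name': app_name,
            'failed_retry': True,
            'is_masking': True,
            'list': data_list[start:start + size],
            'message_level': 1,
            'message_type': message_type,
            'sms_type': 4 if message_type == 'SMS' else 0,
            'type_id': tpl_id,
        }
        try:
            result = send_batch(**payload)
            if not result.get('result'):
                logging.error('batch %s-%s of tpl %s rejected: %s',
                              start, start + size, tpl_id, result)
        except Exception as exc:
            logging.error('batch %s-%s of tpl %s failed: %s',
                          start, start + size, tpl_id, exc)
            return
    logging.info('all %s messages of tpl %s handed off', len(data_list), tpl_id)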
data_list[index: index+200],\n \"message_level\": 0,\n \"message_type\": \"SMS\",\n \"sms_type\": 3\n }\n try:\n result = MessageService().send_batch(**req_data)\n if not result.get(\"result\"):\n logging.error(\n \"send_sms_failed:%s,req:%s,res:%s\",msg_type,req_data,result)\n except Exception as e:\n logging.error(\n \"send_sms_error:%s,req:%s,res:%s,error:%s\" % (\n msg_type, req_data, result, str(e)))\n return\n logging.info(\"send_sms_success:%s\", msg_type)\n\n#生成自动外呼,和分件\n@action(MessageAction.BOMBER_AUTO_CALL_LIST)\ndef bomber_auto_call_list(payload, msg_id):\n\n with db.atomic():\n #单期件分件,分给各期的外包后,余下分配内部指定id,的bomber\n #外包主要通过partner区分不同阶段,同时识别bomber中的partner_id来识别外包账号\n bomber_dispatch_app()\n\n # 分期件分件,分件主要靠installment 识别不同期的bomber\n dispatch_instalment_app()\n #分件记录\n dis_apps = (DispatchApp\n .select(DispatchApp.application)\n .where(DispatchApp.status == DisAppStatus.NORMAL.value))\n\n c1_apps = (\n Application\n .select(Application.id,\n Application.cycle,\n Application.follow_up_date,\n Application.called_times)\n .where(\n Application.status.not_in([ApplicationStatus.REPAID.value,\n ApplicationStatus.AB_TEST.value]),\n Application.cycle == Cycle.C1A.value,\n Application.is_rejected == False, # noqa\n Application.promised_date.is_null(True) |\n (fn.DATE(Application.promised_date) < datetime.today().date())\n ).order_by(Application.overdue_days, Application.apply_at)\n )\n dis_apps_ids = [da.application_id for da in dis_apps]\n\n insert_args = []\n\n for a in c1_apps:\n if a.id in dis_apps_ids:\n continue\n insert_args.append({\n 'application': a.id,\n 'cycle': a.cycle,\n 'follow_up_date': a.follow_up_date,\n 'called_times': 1 if a.called_times else 0,\n 'description': 'init'\n })\n\n if not insert_args:\n logging.error('no application need auto call')\n\n #检索application表,插入数据至auto_call_list\n with db.atomic():\n AutoCallList.delete().execute()\n for idx in range(0, len(insert_args), 100):\n AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()\n\n for idx in range(0, len(insert_args), 100):\n application_list = [\n i['application']\n for i in insert_args[idx:idx + 100]\n ]\n #获取校验后有效的电话号码\n send_to_default_q(\n MessageAction.BOMBER_AUTO_CALL_CONTACT,\n {'application_list': application_list}\n )\n\n logging.info('bomber generate auto call list finished')\n\n #将未下P,特定天数的件重分,即积压时间长的件,在分配\n send_to_default_q(\n MessageAction.UPDATE_BOMBER_FOR_SPECIAL,\n {})\n\n\nclass ChangeBomberTool(object):\n @staticmethod\n def in_record(bomber_id, ids, bd):\n subquery = (Application\n .select(Application.amount,\n fn.NOW().alias('created_at'),\n fn.NOW().alias('updated_at'),\n Application.id.alias('application_id'),\n R(str(bomber_id)).alias('bomber_id'),\n fn.NOW().alias('entry_at'),\n R('null').alias('partner_id'),\n SQL('DATE_ADD(CURDATE(),INTERVAL 14 DAY)')\n .alias('expected_out_time'),\n Application.overdue_days.alias(\n 'entry_overdue_days'))\n .where(Application.status !=\n ApplicationStatus.REPAID.value,\n Application.id << ids))\n\n (Application\n .update(latest_bomber=bomber_id)\n .where(Application.id.in_(ids))\n .execute())\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bd),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n @staticmethod\n def out_record(a, bd):\n _id = str(a.id)\n (DispatchAppHistory.update(\n out_at=datetime.now(),\n out_overdue_days=a.overdue_days,\n out_principal_pending=(\n 
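# --- illustrative sketch, not part of the original worker module ---
# bomber_dispatch_app and allot_c3_case (further below) hand applications
# to a partner's bombers one by one via CycleIter([...ids]) and
# average_gen(gen, existing_list).  Those helpers are defined elsewhere,
# so the round-robin below is only an assumed equivalent (cycle through
# the bomber ids in order), used to illustrate how one slice of
# applications gets spread over a team.

from itertools import cycle


def round_robin_assign(app_ids, bomber_ids):
    rotation = cycle(bomber_ids)
    return {app_id: next(rotation) for app_id in app_ids}


if __name__ == '__main__':
    print(round_robin_assign([101, 102, 103, 104, 105], [7, 8]))
    # {101: 7, 102: 8, 103: 7, 104: 8, 105: 7}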
a.amount -\n Decimal(bd[_id].get('principal_paid'))),\n out_late_fee_pending=(\n bd[_id].get('late_fee') -\n bd[_id].get('late_fee_paid')),\n )\n .where(\n DispatchAppHistory.application == a.id,\n DispatchAppHistory.bomber_id == a.latest_bomber_id\n )).execute()\n\n a.last_bomber = a.latest_bomber\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n a.save()\n\n @staticmethod\n def classify(l, b):\n if len(l) == 1:\n return l[0]\n _l = filter(lambda x: x['bomber'] != b, l)\n return min(_l, key=lambda x: len(x['ids']))\n\n\n@action(MessageAction.UPDATE_BOMBER_FOR_SPECIAL)\ndef update_bomber_for_special(payload, msg_id):\n \"\"\"\n cycle 1b 每天将DPD21且没有处于下P状态的件,分配给另一个催收员\n cycle 2 每天将DPD46且没有处于下P状态的件,分配给另一个催收员\n cycle 3 每天将dpd76且没有处于下p状态的件,分配给另一个催收员\n\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n filter_list = {Cycle.C1B.value: {\"overdue_days\": 21, \"role_id\": 5},\n Cycle.C2.value: {\"overdue_days\": 46, \"role_id\": 6},\n Cycle.C3.value: {\"overdue_days\": 76, \"role_id\": 8}}\n cbt = ChangeBomberTool()\n for cycle, values in filter_list.items():\n overdue_days = values[\"overdue_days\"]\n bombers = (Bomber.select()\n .where(Bomber.role == values[\"role_id\"],\n Bomber.instalment == 0,\n Bomber.is_del == 0))\n bids = {b.id:b for b in bombers}\n apps = (Application.select()\n .where(Application.cycle == cycle,\n Application.type == ApplicationType.CASH_LOAN.value,\n Application.overdue_days == overdue_days,\n Application.status == ApplicationStatus.AB_TEST.value,\n Application.promised_date.is_null(True) |\n (fn.DATE(Application.promised_date) < date.today()),\n Application.latest_bomber_id.in_(list(bids.keys()))))\n classify_dict = defaultdict(list)\n for b in bombers:\n classify_dict[b.group_id].append({\"bomber\": b.id, \"ids\": []})\n with db.atomic():\n app_ids = [i.id for i in apps]\n if app_ids and bids:\n bills = BillService().bill_list(application_ids=app_ids)\n bill_dict = {str(bill['application_id']): bill for bill in\n bills}\n for i in apps:\n current_bomber = bids.get(i.latest_bomber_id)\n if not current_bomber:\n continue\n classify_list = classify_dict.get(current_bomber.group_id)\n d = cbt.classify(classify_list, i.latest_bomber_id)\n d[\"ids\"].append(i.id)\n cbt.out_record(i, bill_dict)\n for group_id, cl_list in classify_dict.items():\n for item in cl_list:\n cbt.in_record(item[\"bomber\"], item[\"ids\"], bill_dict)\n else:\n logging.info(\n \"cycle:{} empty application list {} or bomber list {}\".format(\n cycle, app_ids, list(bids.keys())))\n try:\n update_bomber_for_special_instalment()\n except Exception as e:\n logging.error(\"special_instalment_error:%s\"%str(e))\n\n# 分期c2,c3特殊分件\ndef update_bomber_for_special_instalment():\n filter_list = {Cycle.C1B.value: 21, Cycle.C2.value: 46, Cycle.C3.value: 76}\n for cycle,overdue_days in filter_list.items():\n # 获取分期指定的催收员\n bombers = (Bomber.select().where(Bomber.instalment == cycle,\n Bomber.is_del == 0))\n bids = {b.id:b for b in bombers}\n # 获取催收单\n apps = (Application.select()\n .where(Application.cycle == cycle,\n Application.status == ApplicationStatus.AB_TEST.value,\n Application.type ==\n ApplicationType.CASH_LOAN_STAGING.value,\n Application.overdue_days == overdue_days,\n Application.promised_date.is_null(True) |\n (fn.DATE(Application.promised_date) < date.today()),\n Application.latest_bomber_id.in_(list(bids.keys()))))\n\n classify_dict = defaultdict(list)\n for b in bombers:\n classify_dict[b.group_id].append({\"bomber\":b.id, \"ids\":[]})\n for a in apps:\n 
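# --- illustrative sketch, not part of the original worker module ---
# ChangeBomberTool.classify above picks, inside one group, the bucket of
# the bomber who currently holds the fewest reassigned ids while
# excluding the application's current bomber, so the DPD21/46/76 cases
# always move to a different collector.  The same selection rule on
# plain dicts:

def pick_target_bucket(buckets, current_bomber_id):
    if len(buckets) == 1:
        return buckets[0]
    candidates = [b for b in buckets if b['bomber'] != current_bomber_id]
    return min(candidates, key=lambda b: len(b['ids']))


if __name__ == '__main__':
    group = [{'bomber': 1, 'ids': [10, 11]},
             {'bomber': 2, 'ids': [12]},
             {'bomber': 3, 'ids': []}]
    # bomber 3 is excluded even though it is emptiest; bomber 2 wins
    print(pick_target_bucket(group, current_bomber_id=3))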
current_bomber = bids.get(a.latest_bomber_id)\n if not current_bomber:\n continue\n classify_list = classify_dict.get(current_bomber.group_id)\n d = ChangeBomberTool.classify(classify_list, a.latest_bomber_id)\n d[\"ids\"].append(a.id)\n with db.atomic():\n for group_id,classify_list in classify_dict.items():\n for cl in classify_list:\n aids = cl[\"ids\"]\n if not aids:\n continue\n latest_bomber_id = cl[\"bomber\"]\n q = (Application.update(latest_bomber = latest_bomber_id,\n last_bomber = Application.latest_bomber)\n .where(Application.id << aids)\n .execute())\n record_param = {\n \"cycle\": cycle,\n \"application_ids\": aids,\n \"dest_bomber_id\": latest_bomber_id,\n }\n out_and_in_record_instalment(**record_param)\n\n\n\ndef bomber_dispatch_app():\n\n # 将单期件c1a分件给外包,外包需设置,partner\n try:\n c1a_dispatch_app()\n except Exception as e:\n logging.error(\"c1a_dispatch_app error:%s\"%str(e))\n\n cycle = {\n 1: 10,\n 2: 30,\n 3: 60,\n 4: 90\n }\n\n # 单期外包 Cycle.C2 overdue_day 31\n apps = (Application.select()\n .where(fn.DATE(Application.C2_entry) == date.today(),\n Application.type == ApplicationType.CASH_LOAN.value))\n\n partners = (Partner.select()\n .where(Partner.status == PartnerStatus.NORMAL.value,\n Partner.cycle == Cycle.C2.value))\n\n apps_ids = [a.id for a in apps]\n dispatch_inserts = []\n start_index = 0\n apps_length = len(apps_ids)\n logging.warning('apps length %s' % str(apps_length))\n\n for p in partners: # 目前就一个partner\n bombers = (Bomber.select()\n .where(Bomber.partner == p.id,\n Bomber.status != BomberStatus.OUTER_LEADER.value,\n Bomber.is_del == 0))\n\n gen = CycleIter([b.id for b in bombers])\n existing_list = []\n\n end_index = start_index + int(apps_length * p.app_percentage)\n logging.info('partner length %s' % str(end_index))\n\n if not apps_ids[start_index:end_index]:\n continue\n bills = BillService().bill_list(\n application_ids=apps_ids[start_index:end_index])\n bill_dict = {bill['application_id']: bill for bill in bills}\n\n for a_id in apps_ids[start_index:end_index]:\n bomber = average_gen(gen, existing_list)\n q = (DispatchApp.delete()\n .where(DispatchApp.application == a_id)\n .execute())\n dispatch_inserts.append({\n 'application': a_id,\n 'bomber': bomber,\n 'partner': p.id,\n })\n\n # 件分给外包后,对数据进行备份以备数据分析\n application = (Application.select()\n .where(Application.id == a_id)).first()\n application.latest_bomber = bomber\n application.status = ApplicationStatus.AB_TEST.value\n application.ptp_bomber = None\n application.save()\n day_next_cycle = (cycle.get(application.cycle) -\n application.overdue_days)\n DispatchAppHistory.create(\n application=a_id,\n partner_id=p.id,\n bomber_id=bomber,\n entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=(\n application.amount -\n Decimal(bill_dict[a_id].get('principal_paid'))),\n entry_late_fee_pending=(\n Decimal(bill_dict[a_id].get('late_fee')) -\n Decimal(bill_dict[a_id].get('late_fee_paid'))),\n expected_out_time=(date.today() +\n timedelta(days=day_next_cycle))\n )\n\n start_index = end_index\n\n with db.atomic():\n for idx in range(0, len(dispatch_inserts), 100):\n DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n\n # AB test 分件(人工维护分件)\n\n config = SystemConfig.prefetch(SCI.AB_TEST_C2)\n c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)\n # 余下的单期件分给内部指定催收员id [76, 100, 106, 107, 213, 215, 216, 221, 222, 223, 226, 235]\n c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)\n #python库的application id\n c2 = 
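# --- illustrative sketch, not part of the original worker module ---
# bomber_dispatch_app above walks the partner list once, giving each
# partner a contiguous slice of the day's C2 entries sized by its
# app_percentage; whatever remains after the last partner stays in-house
# for the AB-test bombers.  The slicing arithmetic in isolation:

def split_by_percentage(app_ids, partner_percentages):
    """partner_percentages: ordered list of (partner_id, fraction)."""
    slices, start = {}, 0
    for partner_id, fraction in partner_percentages:
        end = start + int(len(app_ids) * fraction)
        slices[partner_id] = app_ids[start:end]
        start = end
    return slices, app_ids[start:]          # per-partner slices, in-house rest


if __name__ == '__main__':
    slices, rest = split_by_percentage(list(range(10)), [(7, 0.3), (9, 0.3)])
    print(slices)   # {7: [0, 1, 2], 9: [3, 4, 5]}
    print(rest)     # [6, 7, 8, 9] stay in-house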
apps_ids[start_index:]\n if c2:\n bills = BillService().bill_list(\n application_ids=c2)\n else:\n bills = []\n #java库的bill\n bill_dict = {bill['application_id']: bill for bill in bills}\n logging.info('c2 AB_test length: %s' % str(c2))\n gen = CycleIter(c2_bomber)\n existing_list = []\n for c in c2:\n bomber = average_gen(gen, existing_list)\n application = Application.filter(Application.id == c).first()\n application.status = ApplicationStatus.AB_TEST.value\n application.latest_bomber = bomber\n application.ptp_bomber = None\n application.save()\n\n day_next_cycle = 46 - application.overdue_days\n DispatchAppHistory.create(\n application=c,\n bomber_id=bomber,\n entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=(application.amount\n - bill_dict[c].get('principal_paid', 0)),\n entry_late_fee_pending=(\n bill_dict[c].get('late_fee', 0) -\n bill_dict[c].get('late_fee_paid', 0)),\n expected_out_time=(date.today() + timedelta(days=day_next_cycle))\n )\n ab_test_other()\n\n\n\n# 单期的件部分分给外包,内部的C1a 不用分件进入自动外呼\ndef c1a_dispatch_app():\n today = datetime.today().date()\n tomorrow = today + timedelta(days=1)\n #获取单期的件\n c1a_apps = (Application.select()\n .where(Application.status << [ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value],\n Application.dpd1_entry >= today,\n Application.dpd1_entry < tomorrow,\n Application.type == ApplicationType.CASH_LOAN.value))\n all_aids = [a.id for a in c1a_apps]\n # 获取外包部门\n partners = (Partner.select()\n .where(Partner.status == PartnerStatus.NORMAL.value,\n Partner.cycle == Cycle.C1A.value))\n end = 0\n for p in partners:\n #直接通过partner 获取bomber\n bombers = (Bomber.select()\n .where(Bomber.partner == p.id,\n Bomber.is_del == 0))\n start = end\n end += int(len(all_aids) * p.app_percentage)\n aids = all_aids[start:end]\n bids = [b.id for b in bombers]\n if not bids or not aids:\n continue\n # 获取每个外包应该分到的件的个数\n average_number = get_average_number(len(aids),len(bids))\n p_end = 0\n for i,bid in enumerate(bids):\n p_start = p_end\n p_end += average_number[i]\n b_aids = aids[p_start:p_end]\n with db.atomic():\n q = (Application\n .update(latest_bomber = bid,\n status = ApplicationStatus.AB_TEST.value)\n .where(Application.id << b_aids)\n .execute())\n params = {\n \"cycle\": Cycle.C1A.value,\n \"dest_partner_id\": p.id,\n \"application_ids\": b_aids,\n \"dest_bomber_id\": bid\n }\n new_in_record(**params)\n try:\n dispatch_inserts = []\n for aid in b_aids:\n dispatch_inserts.append({'application': aid,\n 'bomber': bid,\n 'partner': p.id,\n 'status': DisAppStatus.NORMAL.value})\n if dispatch_inserts:\n q = (DispatchApp.insert_many(dispatch_inserts).execute())\n except Exception as e:\n logging.error(\"c1a分件写入dispatch_app error:%s\"%str(e))\n\n\ndef ab_test_other():\n cycle_upper = {\n 1: 10,\n 2: 30,\n 3: 60,\n 4: 76\n }\n\n c1b = (Application.select()\n .where(fn.DATE(Application.C1B_entry) == date.today(),\n Application.type == ApplicationType.CASH_LOAN.value)\n .order_by(-Application.overdue_days)\n )\n c1b_id = [a.id for a in c1b]\n\n dis_app_update = (DispatchApp.update(status=DisAppStatus.ABNORMAL.value)\n .where(DispatchApp.application.in_(c1b_id)))\n dis_app_update.execute()\n\n c3 = (Application.select()\n .where(fn.DATE(Application.C3_entry) == date.today(),\n Application.type == ApplicationType.CASH_LOAN.value))\n all_id = [b.id for b in c3]\n\n try:\n # 将C3的件一部分分配给外包\n partners = (Partner.select()\n .where(Partner.status == PartnerStatus.NORMAL.value,\n Partner.cycle == 
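# --- illustrative sketch, not part of the original worker module ---
# c1a_dispatch_app above relies on get_average_number(len(aids), len(bids))
# to decide how many applications each outsourced bomber receives before
# slicing the id list.  get_average_number is defined elsewhere; the
# helper below is only an assumed behaviour (an even split with the
# remainder spread over the first buckets), used to make the slicing
# readable, not the project's actual implementation.

def assumed_average_number(total, buckets):
    base, remainder = divmod(total, buckets)
    return [base + 1 if i < remainder else base for i in range(buckets)]


if __name__ == '__main__':
    counts = assumed_average_number(10, 3)      # [4, 3, 3]
    print(counts, sum(counts) == 10)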
Cycle.C3.value))\n\n start_index, end_index, out_apps = 0, 0, {}\n for p in partners:\n end_index += int(len(all_id) * p.app_percentage)\n out_apps[p.id] = all_id[start_index:end_index]\n start_index = end_index\n c3_id = all_id[end_index:]\n allot_c3_case(out_apps)\n except:\n c3_id = all_id\n\n config = SystemConfig.prefetch(SCI.AB_TEST_C1B, SCI.AB_TEST_C3)\n c1b_bomber = config.get(SCI.AB_TEST_C1B, SCI.AB_TEST_C1B.default_value)\n c3_bomber = config.get(SCI.AB_TEST_C3, SCI.AB_TEST_C3.default_value)\n # 过滤掉催分期的催收员\n c3_bomber = get_cash_bomber(c3_bomber, Cycle.C3.value)\n data = [{'ids': c1b_id, 'bomber': c1b_bomber, 'index': 0, 'cycle': 2},\n {'ids': c3_id, 'bomber': c3_bomber, 'index': 1, 'cycle': 4}]\n\n for d in data:\n applications = d.get('ids')\n length = len(applications)\n end = int(length * d.get('index'))\n gen = CycleIter(d.get('bomber'))\n existing_list = []\n if not applications:\n continue\n bills = BillService().bill_list(\n application_ids=applications)\n bill_dict = {bill['application_id']: bill for bill in bills}\n for a in applications[:end]:\n bomber = average_gen(gen, existing_list)\n application = Application.filter(Application.id == a).first()\n application.status = ApplicationStatus.AB_TEST.value\n application.latest_bomber = bomber\n application.ptp_bomber = None\n application.save()\n\n day_next_cycle = (cycle_upper.get(application.cycle) -\n application.overdue_days)\n DispatchAppHistory.create(\n application=a,\n bomber_id=bomber,\n entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n\n entry_principal_pending=(application.amount -\n bill_dict[a]['principal_paid']),\n entry_late_fee_pending=(bill_dict[a]['late_fee'] -\n bill_dict[a]['late_fee_paid']),\n expected_out_time=(date.today() +\n timedelta(days=day_next_cycle))\n )\n\n # 根据partner表中的配置给外包团队分件。\n if d.get('cycle') == Cycle.C1B.value:\n c1b_wb_partner = (Partner.select()\n .where(Partner.cycle == Cycle.C1B.value,\n Partner.status ==\n PartnerStatus.NORMAL.value))\n # 获取c1b外包团队\n c1b_wb_p_dict = { str(p.id):p.app_percentage for p in c1b_wb_partner}\n c1b_wb_pids = list(map(int, c1b_wb_p_dict.keys()))\n c1b_wb_bombers = (Bomber.select()\n .where(Bomber.is_del == 0,\n Bomber.partner_id << c1b_wb_pids,\n Bomber.password.is_null(False)))\n # 获取每个外包团队的成员和团队应分的件数\n c1b_wb_pba = {}\n apps_num = len(applications)\n for cb in c1b_wb_bombers:\n cb_key = str(cb.partner_id)\n if cb_key in c1b_wb_pba:\n c1b_wb_pba[cb_key][\"bids\"].append(cb.id)\n else:\n # 获取比例,计算分配给外包的件的个数\n start = end\n percentage = c1b_wb_p_dict.get(cb_key, 0)\n end = start + ceil(apps_num * percentage)\n c1b_wb_pba[cb_key] = {\n \"bids\": [cb.id],\n \"pid\": cb.partner_id,\n \"apps\": applications[start:end]\n }\n # 获取现金贷c1b新件剩余的件\n inner_c1b_apps = applications[end:]\n dispatch_c1b_inner_apps(aids=inner_c1b_apps,\n bills=bill_dict,\n period=cycle_upper.get(Cycle.C1B.value))\n for pid,c1b_wb in c1b_wb_pba.items():\n c1b_wb_apps = c1b_wb[\"apps\"]\n c1b_wb_bids = c1b_wb[\"bids\"]\n average_nums = get_average_number(len(c1b_wb_apps),\n len(c1b_wb_bids))\n bid_end = 0\n for b_index,bid in enumerate(c1b_wb_bids):\n bid_start = bid_end\n bid_end = bid_start + average_nums[b_index]\n bid_apps = c1b_wb_apps[bid_start:bid_end]\n logging.info(\"c1b_分件:bid:%s,bid_apps:%s\"%(bid, bid_apps))\n with db.atomic():\n app_sql = (Application.update(latest_bomber=bid,\n status=ApplicationStatus.AB_TEST.value,\n ptp_bomber=None)\n .where(Application.id << bid_apps))\n app_sql.execute()\n params = {\n \"apps\":bid_apps,\n \"partner_id\": 
int(pid),\n \"bill_dict\": bill_dict,\n \"period\": cycle_upper.get(Cycle.C1B.value),\n \"bomber_id\":bid\n }\n c1b_dispatch_in_record(**params)\n try:\n for aid in bid_apps:\n dispatch_inserts = {\n 'application': aid,\n 'bomber': bid,\n 'partner': int(pid),\n 'status': DisAppStatus.NORMAL.value,\n }\n q = (DispatchApp.update(**dispatch_inserts)\n .where(DispatchApp.application == aid)\n .execute())\n if not q:\n DispatchApp.create(**dispatch_inserts)\n except Exception as e:\n logging.error(\"dispatchApp插入失败:%s\"%str(e))\n\n\ndef allot_c3_case(out_data):\n dispatch_inserts = []\n for key, value in out_data.items():\n if not value:\n continue\n\n bombers = (Bomber\n .filter(Bomber.partner == key,\n Bomber.status == BomberStatus.OUTER.value,\n Bomber.is_del == 0))\n bomber_ids = [b.id for b in bombers]\n bomber = CycleIter(bomber_ids)\n bills = BillService().bill_list(application_ids=value)\n bill_dict = {bill['application_id']: bill for bill in bills}\n\n for v in value:\n bomber_id = bomber.__next__()\n q = (DispatchApp.delete()\n .where(DispatchApp.application == v)\n .execute())\n dispatch_inserts.append({\n 'application': v,\n 'bomber': bomber_id,\n 'partner': key,\n })\n\n # 对数据进行备份以备数据分析\n application = (Application.filter(Application.id == v)).first()\n application.latest_bomber = bomber_id\n application.ptp_bomber = None\n application.status = ApplicationStatus.AB_TEST.value\n application.save()\n\n # c3进入下一个cycle时逾期天数为90天\n day_next_cycle = (90 - application.overdue_days)\n DispatchAppHistory.create(\n application=v,\n partner_id=key,\n bomber_id=bomber_id,\n entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=(\n application.amount -\n Decimal(bill_dict[v].get('principal_paid'))),\n entry_late_fee_pending=(\n Decimal(bill_dict[v].get('late_fee')) -\n Decimal(bill_dict[v].get('late_fee_paid'))),\n expected_out_time=(\n date.today() + timedelta(days=day_next_cycle))\n )\n\n with db.atomic():\n for idx in range(0, len(dispatch_inserts), 100):\n DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n\n# 获取只催单期的催收员\ndef get_cash_bomber(bids, cycle):\n cash_bombers = (Bomber.select()\n .where(Bomber.id << bids,\n Bomber.is_del == 0,\n Bomber.instalment != cycle))\n cash_bids = [b.id for b in cash_bombers]\n return cash_bids\n\n# c1b 单期的件分件给内部员工\ndef dispatch_c1b_inner_apps(aids, bills, period=30):\n # 获取需要分件的员工\n bombers = (Bomber.select()\n .where(Bomber.role_id == 5,\n Bomber.is_del == 0,\n Bomber.instalment == 0))\n bids = [b.id for b in bombers]\n if not aids or not bids:\n return\n avg_num = get_average_number(len(aids),len(bids))\n end = 0\n with db.atomic():\n for index,b in enumerate(bids):\n start = end\n end = start + avg_num[index]\n b_aids = aids[start:end]\n app_sql = (Application.update(latest_bomber=b,\n status=ApplicationStatus.AB_TEST.value,\n ptp_bomber=None)\n .where(Application.id << b_aids))\n app_sql.execute()\n params = {\n \"apps\": b_aids,\n \"bill_dict\": bills,\n \"period\": period,\n \"bomber_id\": b\n }\n c1b_dispatch_in_record(**params)\n\n# 将分期的件分配给员工\ndef dispatch_instalment_app():\n\n cycle_list = [Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value,Cycle.M3.value]\n # 获取每天,获取每个cycle没有分出去的件\n for cycle in cycle_list:\n apps = (Application.select()\n .where(Application.cycle == cycle,\n Application.latest_bomber.is_null(True),\n Application.status != ApplicationStatus.REPAID.value,\n (Application.type ==\n ApplicationType.CASH_LOAN_STAGING.value)))\n aids = [a.id for a in apps]\n if not aids:\n 
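allot_c3_case above flushes its DispatchApp rows with insert_many in batches of 100 inside one transaction, which keeps individual statements small. A generic chunking helper that mirrors the range(0, len(...), 100) loop; the commented ORM call is only a sketch of how it would be used, not a verified signature.

def chunked(rows, size):
    # Yield consecutive slices of at most `size` rows, like the range(0, len, 100) loop above.
    for idx in range(0, len(rows), size):
        yield rows[idx:idx + size]

# Usage sketch (pseudo-ORM, mirroring DispatchApp.insert_many(...).execute() above):
# for batch in chunked(dispatch_inserts, 100):
#     DispatchApp.insert_many(batch).execute()

if __name__ == "__main__":
    print([batch for batch in chunked(list(range(7)), 3)])  # [[0, 1, 2], [3, 4, 5], [6]]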
continue\n # 获取指定的bomber\n bombers = (Bomber.select()\n .where(Bomber.is_del == 0,\n Bomber.instalment == cycle))\n bids = [b.id for b in bombers]\n if not bids:\n continue\n average_nums = get_average_number(len(apps),len(bids))\n end = 0\n for i,bid in enumerate(bids):\n start = end\n end = start + average_nums[i]\n bid_apps = aids[start:end]\n with db.atomic():\n # 更新状态\n q = (Application.update(ptp_bomber = None,\n latest_bomber = bid, #最新的催收员id\n last_bomber = Application.latest_bomber,#前一接收的催收员\n status = ApplicationStatus.AB_TEST.value)#人工维护的件\n .where(Application.id << bid_apps)\n .execute())\n record_param = {\"cycle\": cycle,\n \"application_ids\": bid_apps,\n \"dest_bomber_id\": bid}\n out_and_in_record_instalment(**record_param)\n\n\n# 分期的入案和出案\ndef out_and_in_record_instalment(**kwargs):\n if not kwargs.get(\"application_ids\"):\n return\n # 先出案\n out_q = (DispatchAppHistory.update(out_at = fn.NOW())\n .where(DispatchAppHistory.application << kwargs['application_ids'],\n DispatchAppHistory.out_at.is_null(True))\n .execute())\n # 入案\n cycle_period = {\n 1: '10',\n 2: '30',\n 3: '60',\n 4: '90'\n }\n period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')\n kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'\n subquery = (Application\n .select(Application.amount,\n fn.NOW().alias('created_at'),\n fn.NOW().alias('updated_at'),\n Application.id.alias('application_id'),\n R(str(kwargs['dest_bomber_id'])).alias('bomber_id'),\n fn.NOW().alias('entry_at'),\n Application.overdue_days.alias('entry_overdue_days'),\n R(str(kwargs['dest_partner_id'])).alias('partner_id'),\n (SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'\n ' t1.overdue_days) DAY)' % period))\n .alias('expected_out_time'))\n .where(Application.status != ApplicationStatus.REPAID.value,\n Application.id << kwargs['application_ids']))\n application_list = list(subquery)\n for idx in range(0, len(application_list), 50):\n applications = application_list[idx:idx + 50]\n app_ids = [i.application_id for i in applications]\n # 获取所有的overdue_bill\n overdue_bills = (OverdueBill.select()\n .where(OverdueBill.collection_id << app_ids))\n sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]\n bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)\n insert_args = lambad_instalment_result(bill_list, applications)\n if not insert_args:\n continue\n DispatchAppHistory.insert_many(insert_args).execute()\n\n#分期入案结果格式化\ndef lambad_instalment_result(bill_list,applications):\n bill_dict = {}\n insert_args = []\n # 计算入案金额\n for sub_bill in bill_list:\n bill_id = sub_bill[\"bill_id\"]\n principal_pending = sub_bill[\"amount\"] - sub_bill['principal_paid']\n late_fee_pending = sub_bill[\"late_fee\"] - sub_bill[\"late_fee_paid\"]\n if bill_id in bill_dict:\n bill_dict[bill_id][\"entry_principal_pending\"] += principal_pending\n bill_dict[bill_id][\"entry_late_fee_pending\"] += late_fee_pending\n else:\n bill_dict[bill_id] = {\n \"entry_principal_pending\": principal_pending,\n \"entry_late_fee_pending\": late_fee_pending\n }\n\n for app in applications:\n bill_entry = bill_dict.get(app.bill_id, {})\n entry_principal_pending = bill_entry.get(\"entry_principal_pending\", 0)\n entry_late_fee_pending = bill_entry.get(\"entry_late_fee_pending\", 0)\n insert_dict = {\n 'created_at': app.created_at,\n 'updated_at': app.updated_at,\n 'application': app.application_id,\n 'bomber_id': app.bomber_id,\n 'entry_at': app.entry_at,\n 'entry_overdue_days': app.entry_overdue_days,\n 'partner_id': app.partner_id,\n 
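lambad_instalment_result aggregates the sub-bill list into per-bill pending amounts before building the insert rows: principal pending is amount minus principal_paid and late-fee pending is late_fee minus late_fee_paid, summed over all sub-bills sharing a bill_id. A small pure-Python sketch of that aggregation; the dict keys follow the sub_bill fields used above.

from collections import defaultdict

def pending_by_bill(sub_bills):
    # sub_bills: iterable of dicts with bill_id, amount, principal_paid, late_fee, late_fee_paid
    totals = defaultdict(lambda: {"entry_principal_pending": 0, "entry_late_fee_pending": 0})
    for sb in sub_bills:
        acc = totals[sb["bill_id"]]
        acc["entry_principal_pending"] += sb["amount"] - sb["principal_paid"]
        acc["entry_late_fee_pending"] += sb["late_fee"] - sb["late_fee_paid"]
    return dict(totals)

if __name__ == "__main__":
    subs = [
        {"bill_id": 7, "amount": 500, "principal_paid": 100, "late_fee": 30, "late_fee_paid": 0},
        {"bill_id": 7, "amount": 500, "principal_paid": 500, "late_fee": 20, "late_fee_paid": 20},
    ]
    print(pending_by_bill(subs))  # {7: {'entry_principal_pending': 400, 'entry_late_fee_pending': 30}}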
'expected_out_time': app.expected_out_time,\n 'entry_principal_pending': entry_principal_pending,\n 'entry_late_fee_pending': entry_late_fee_pending\n }\n insert_args.append(insert_dict)\n return insert_args\n\n\ndef c1b_dispatch_in_record(**kwargs):\n app_ids = kwargs.get(\"apps\")\n partner_id = kwargs.get(\"partner_id\",\"null\")\n bill_dict = kwargs.get(\"bill_dict\")\n period = kwargs.get(\"period\")\n bomber_id = kwargs.get('bomber_id')\n if not all([app_ids, partner_id, bill_dict, period]):\n return False\n bill_dict = { str(k):v for k,v in bill_dict.items()}\n subquery = (Application\n .select(Application.amount,\n fn.NOW().alias('created_at'),\n fn.NOW().alias('updated_at'),\n Application.id.alias('application_id'),\n R(str(bomber_id)).alias('bomber_id'),\n fn.NOW().alias('entry_at'),\n Application.overdue_days.alias('entry_overdue_days'),\n R(str(partner_id)).alias('partner_id'),\n (SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'\n ' t1.overdue_days) DAY)' % period))\n .alias('expected_out_time'))\n .where(Application.id << app_ids))\n application_list = list(subquery)\n for idx in range(0,len(application_list),1000):\n applications = application_list[idx:idx+1000]\n insert_args = list(map(partial(lambda_result,\n dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n\n#获取联系的电话号码\n@action(MessageAction.BOMBER_AUTO_CALL_CONTACT)\ndef bomber_auto_call_contact(payload, msg_id):\n application_list = payload['application_list']\n applications = []\n for app_id in application_list:\n applications.append(Application.filter(Application.id == app_id)\n .first())\n # 得到每个件的联系人队列\n with db.atomic():\n for application in applications:\n cycle = application.cycle\n # 修改查询时的条件\n contacts = (\n Contact\n .select()\n .where(Contact.user_id == application.user_id,\n Contact.latest_status.not_in(ContactStatus.no_use()))\n .order_by(-Contact.useful,\n Contact.relationship,\n -Contact.total_duration,\n -Contact.total_count)\n )\n\n level1 = []\n level2 = []\n level3 = []\n level = []\n for c in contacts:\n if c.relationship == Relationship.APPLICANT.value:\n level.append(c)\n elif c.relationship == Relationship.FAMILY.value:\n level1.append(c)\n elif c.relationship == Relationship.COMPANY.value:\n level2.append(c)\n elif c.relationship == Relationship.SUGGESTED.value:\n level3.append(c)\n\n contacts = level + level2 + level1 + level3\n\n numbers = []\n fc_count = 0\n\n # Pre-check if need phone calls,校验手机号是否可以拨通\n app_calls = []\n need_verify = False\n for eac_contact in contacts:\n if (eac_contact.relationship == Relationship.FAMILY.value and\n eac_contact.useful == ContactsUseful.NONE.value):\n need_verify = True\n break\n\n if need_verify:\n logging.info('Found contact need update. 
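The contact queue built above is ordered by bucketing: the applicant's own numbers first, then company numbers, then family, then suggested contacts, with usefulness, total call duration and call count breaking ties inside the SQL ordering. A rough equivalent expressed as a single sort key is sketched below; it is a simplification of the bucket-then-concatenate logic, and the relationship labels are illustrative stand-ins for the Relationship enum values.

RELATIONSHIP_PRIORITY = {"applicant": 0, "company": 1, "family": 2, "suggested": 3}
# Matches the manual bucketing above (level + level2 + level1 + level3).

def order_contacts(contacts):
    # contacts: list of dicts with 'relationship', 'useful', 'total_duration', 'total_count'
    return sorted(
        contacts,
        key=lambda c: (RELATIONSHIP_PRIORITY.get(c["relationship"], 99),
                       -c.get("useful", 0),
                       -c.get("total_duration", 0),
                       -c.get("total_count", 0)),
    )

if __name__ == "__main__":
    cs = [{"relationship": "family", "useful": 1}, {"relationship": "applicant", "useful": 1}]
    print([c["relationship"] for c in order_contacts(cs)])  # ['applicant', 'family']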
app id {}'\n .format(str(application.id)))\n app_calls = AuditService().phone_invalid(cat=Relationship(1).name,\n application_id=application.external_id)\n\n call_history = True\n c1b_family_dict = defaultdict(list)\n for c in contacts:\n if c.relationship == Relationship.COMPANY.value:\n if cycle == Cycle.C1A.value:\n call_history = check_call_history(application)\n break\n if cycle == Cycle.C1B.value:\n # 暂时c1b公司只打本人填写的电话\n if c.source != CompanyContactType.BASIC_INFO_JOB_TEL.value:\n continue\n if c.relationship == Relationship.FAMILY.value:\n if cycle == Cycle.C1A.value:\n call_history = check_call_history(application)\n break\n\n # Update contact useful\n if c.useful == ContactsUseful.NONE.value:\n c.useful = check_valid_phone(app_calls, c)\n c.save()\n\n if c.useful == ContactsUseful.INVALID.value:\n logging.info('Found invalid contact. {}'\n .format(str(c.id)))\n continue\n\n # 需要对family类进行排序\n if cycle == Cycle.C1B.value:\n c1b_family_dict[c.source].append(c.number)\n continue\n if c.relationship == Relationship.SUGGESTED.value:\n if cycle not in (Cycle.C2.value, Cycle.C3.value):\n break\n if cycle == Cycle.C2.value and fc_count > 10:\n break\n if cycle == Cycle.C3.value and fc_count > 20:\n break\n fc_count += 1\n numbers.append(c.number)\n\n # if cycle1 applicant is in no_use add ec\n if len(numbers) == 0 or not call_history:\n src_contact = (\n Contact.select()\n .where(Contact.user_id == application.user_id,\n Contact.source in FamilyContactType.c1a_order()))\n\n # C1A五天内催收电话没打通,按新的顺序拨打;由原来的2种变更为4种\n c1a_family_dict = defaultdict(list)\n for e in src_contact:\n c1a_family_dict[e.source].append(e.number)\n\n for call_type in FamilyContactType.c1a_order():\n numbers.extend(c1a_family_dict[call_type])\n\n if cycle == Cycle.C1B.value:\n for call_type in FamilyContactType.c1b_order():\n numbers.extend(c1b_family_dict[call_type])\n\n numbers = list(set(numbers))\n update_query = (\n AutoCallList\n .update(numbers=','.join(numbers))\n .where(AutoCallList.application == application.id)\n )\n update_query.execute()\n\n\ndef check_valid_phone(phone_list, contact):\n useful = ContactsUseful.AVAILABLE.value\n for each_phone in phone_list:\n if contact.number == each_phone.get('tel_no') or \\\n contact.number == each_phone.get('mobile_no'):\n useful = ContactsUseful.INVALID.value\n break\n return useful\n\n# c1a的件如果5天之内没有接通,开放ec\ndef check_call_history(application):\n app_create_at = application.created_at + timedelta(days=4)\n if datetime.today().date() > app_create_at.date():\n call_actions = (CallActions.select()\n .where(CallActions.type == 0,\n CallActions.application == application.id,\n CallActions.created_at >\n (datetime.now() - timedelta(days=5))))\n for call in call_actions:\n if call.phone_status == PhoneStatus.CONNECTED.value:\n return True\n return False\n return True\n\n\n#当前时间与更新时间间隔超过 SCAVENGER_TIME 时间时,SCAVENGER更新状态\n@action(MessageAction.BOMBER_SCAVENGER)\ndef scavenger(payload, msg_id):\n scavenger_time = -60\n scavenger = (SystemConfig.select()\n .where(SystemConfig.key == 'SCAVENGER_TIME')\n .first())\n if scavenger and scavenger.value.isdigit():\n scavenger_time = -int(scavenger.value)\n update_auto_call_list = (\n AutoCallList\n .update(status=AutoListStatus.PENDING.value,\n description='scavenger')\n .where(\n AutoCallList.status == AutoListStatus.PROCESSING.value,\n AutoCallList.updated_at <\n datetime.now() + timedelta(minutes=scavenger_time),\n )\n )\n count = update_auto_call_list.execute()\n logging.info('scavenger processed %s application', count)\n\n # 
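check_call_history above decides whether a C1A case may fall back to EC numbers: for roughly the first five days after entry the answer is always no, and afterwards EC is opened only if no call connected during the last five days. The sketch below inverts the return convention (it answers "should EC be opened?" directly) and uses plain tuples instead of CallActions rows.

from datetime import datetime, timedelta

def should_open_ec(app_created_at, call_records, now=None):
    # call_records: list of (created_at, connected) tuples for this application.
    now = now or datetime.now()
    if now.date() <= (app_created_at + timedelta(days=4)).date():
        return False  # still inside the initial window, keep EC closed
    window_start = now - timedelta(days=5)
    recently_connected = any(created > window_start and connected
                             for created, connected in call_records)
    return not recently_connected

if __name__ == "__main__":
    created = datetime.now() - timedelta(days=10)
    print(should_open_ec(created, [(datetime.now() - timedelta(days=2), False)]))  # True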
更新自动外呼中状态是邮箱的件的状态\n mail_box_scavenger_time = -30\n mail_box_scavenger = (SystemConfig.select()\n .where(SystemConfig.key == 'MAIL_BOX_SCAVENGER_TIME')\n .first())\n if mail_box_scavenger and mail_box_scavenger.value.isdigit():\n mail_box_scavenger_time = -int(mail_box_scavenger.value)\n update_mail_box_call_list = (\n AutoCallList.update(status=AutoListStatus.PENDING.value)\n .where(AutoCallList.status == AutoListStatus.MAILBOX.value,\n AutoCallList.updated_at <\n datetime.now() + timedelta(minutes=mail_box_scavenger_time))\n )\n mail_box_count = update_mail_box_call_list.execute()\n logging.info(\"scavenger update mail box %s\", mail_box_count)\n\n # ivr中30分钟没有接收到回调,修改ivr中的状态\n update_auto_ivr = (\n AutoIVR\n .update(status=AutoIVRStatus.AVAILABLE.value)\n .where(AutoIVR.status == AutoIVRStatus.PROCESSING.value,\n AutoIVR.updated_at < datetime.now() + timedelta(minutes=-30)\n )\n )\n ivr_result = update_auto_ivr.execute()\n logging.info(\"scavenger update %s ivr\"%ivr_result)\n\n\n@action(MessageAction.BOMBER_CLEAR_OVERDUE_PTP)\ndef bomber_clear_overdue_ptp(payload, msg_id):\n # 对于C1B, C2 和 C3 不存在预测试呼出,故其ptp清除后需回到外包或ab_test\n #C1B, C2,C3 件,当前时间超过承诺还款时间时,转为人工维护\n update_overdue_ptp_ab = (\n Application.update(\n status=ApplicationStatus.AB_TEST.value,\n ).where(\n fn.DATE(Application.promised_date) < datetime.today().date(),\n Application.status == ApplicationStatus.PROCESSING.value,\n Application.cycle << [Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value]\n )\n )\n count1 = update_overdue_ptp_ab.execute()\n logging.info('bomber overdue ptp for C1B C2 and C3 cleared: %s', count1)\n\n now_and_yesterday = ((datetime.today() + timedelta(days=1)).date(),\n datetime.today().date())\n overdue_1a1b_cs_ptp = (CallActions\n .select()\n .where(fn.DATE(CallActions.promised_date)\n .in_(now_and_yesterday),\n CallActions.bomber_id == 72))\n update_overdue_1a1b_cs_ptp = (\n Application\n .update(status=ApplicationStatus.UNCLAIMED.value)\n .where(Application.status == ApplicationStatus.PROCESSING.value,\n Application.cycle == Cycle.C1A.value,\n Application.id.in_(overdue_1a1b_cs_ptp)))\n\n logging.debug(\"bomber c1a c1b cs ptp: %s\", update_overdue_1a1b_cs_ptp)\n count2 = update_overdue_1a1b_cs_ptp.execute()\n logging.info('bomber c1a c1b cs overdue ptp cleared: %s', count2)\n\n update_overdue_ptp = (\n Application\n .update(\n status=ApplicationStatus.UNCLAIMED.value,\n ).where(\n fn.DATE(Application.promised_date) < datetime.today().date(),\n Application.status == ApplicationStatus.PROCESSING.value,\n Application.cycle == Cycle.C1A.value,\n )\n )\n count = update_overdue_ptp.execute()\n logging.info('bomber overdue ptp cleared: %s', count)\n\n\n@action(MessageAction.REPORT_BOMBER_COLLECTION)\ndef report_bomber_collection(payload, msg_id):\n start_date = (ReportCollection\n .select(fn.MAX(ReportCollection.apply_date))\n .scalar())\n now = datetime.now()\n if start_date and str(start_date) == str(now)[:10]:\n return\n end_date = str(now + timedelta(days=1))[:10]\n start_date = str(now)[:10]\n\n dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))\n all_overdue_loan_sql1 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n GROUP BY 1;\n \"\"\"\n s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()\n d1 = OperatedDict(s_data1)\n\n all_overdue_loan_sql2 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE DATE(ba.follow_up_date) > CURDATE()\n AND ba.called_counts = 0\n GROUP BY 1;\n \"\"\"\n s_data2 = 
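The scavenger above is a stale-state sweeper: anything still marked PROCESSING whose updated_at is older than the configurable SCAVENGER_TIME (default 60 minutes) is flipped back to PENDING so it re-enters the queue, and the same pattern is reused for mailbox items and IVR rows with different timeouts. A minimal in-memory sketch of that pattern, with status values as plain strings.

from datetime import datetime, timedelta

def find_stale(items, minutes, now=None):
    # items: list of dicts with 'status' and 'updated_at'; mirrors the scavenger query above.
    now = now or datetime.now()
    cutoff = now - timedelta(minutes=minutes)
    return [i for i in items if i["status"] == "PROCESSING" and i["updated_at"] < cutoff]

def scavenge(items, minutes=60):
    stale = find_stale(items, minutes)
    for item in stale:
        item["status"] = "PENDING"
        item["description"] = "scavenger"
    return len(stale)

if __name__ == "__main__":
    rows = [{"status": "PROCESSING", "updated_at": datetime.now() - timedelta(hours=2)}]
    print(scavenge(rows), rows[0]["status"])  # 1 PENDING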
readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()\n d2 = OperatedDict(s_data2)\n\n overdue_loans_entered_into_predict_call_system_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE ba.called_counts >= 1\n GROUP BY 1;\n \"\"\"\n s_data3 = readonly_db.execute_sql(\n overdue_loans_entered_into_predict_call_system_sql).fetchall()\n d3 = OperatedDict(s_data3)\n\n loans_completed_sql = \"\"\"\n SELECT ba.cycle, COUNT(DISTINCT ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()\n d4 = OperatedDict(s_data4)\n\n connected_calls_automatic_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()\n d5 = OperatedDict(s_data5)\n\n connected_calls_manual_sql = \"\"\"\n SELECT bb.cycle, COUNT(bb.id)\n FROM bomber.bombing_history bb\n WHERE DATE(bb.created_at) = curdate()\n AND (bb.bomber_id < 150 OR bb.bomber_id > 200)\n GROUP BY bb.cycle;\n \"\"\"\n s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()\n d6 = OperatedDict(s_data6)\n\n logging.info('Directly get data from database successfully.')\n\n c1 = d1 - d2\n c2 = d3\n c3 = c2 / c1\n c4 = d4\n c5 = c4 / c2\n c6 = d5\n c7 = c6 / c4\n c8 = d6\n c9 = OperatedDict(get_agent())\n c10 = (c6 + c8) / c9\n try:\n c11 = average_call_duration_team(start_date, end_date)\n except AttributeError:\n c11 = {}\n lst = []\n for i in range(1, 5):\n lst.append({\n 'apply_date': start_date,\n 'cycle': dct[i],\n 'all_overdue_loan': c1.get(i, 0),\n 'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),\n 'of_overdue_loans_entered_into_predict_call_system':\n round(c3.get(i, 0) * 100, 1),\n 'loans_completed': c4.get(i, 0),\n 'of_completed_loans_in_predict_call_system':\n round(c5.get(i, 0) * 100, 1),\n 'connected_calls_automatic': c6.get(i, 0),\n 'connected_calls_automatic_completed_loans':\n round(c7.get(i, 0), 1),\n 'connected_calls_manual': c8.get(i, 0),\n 'agent': c9.get(i, 0),\n 'average_calls_agent': round(c10.get(i, 0), 1),\n 'average_call_duration_team': round(c11.get(i, 0), 1)\n })\n ReportCollection.insert_many(lst).execute()\n\n logging.info('report_bomber_collection:Done!')\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)\ndef bomber_auto_call_list_record(payload, msg_id):\n \"\"\"记录一年的auto_call_list,删除前一天的数据,增加今天的数据\"\"\"\n now = datetime.now()\n if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):\n date_sql = \"\"\"\n SELECT DATE(created_at) FROM auto_call_list_record\n GROUP BY DATE(created_at) limit 1\n \"\"\"\n del_date = db.execute_sql(date_sql).fetchone()[0]\n del_sql = \"\"\"\n DELETE FROM auto_call_list_record WHERE date(created_at) = %s\n \"\"\"\n db.execute_sql(del_sql, [del_date])\n sql = \"\"\"\n INSERT INTO auto_call_list_record\n SELECT * FROM auto_call_list\n \"\"\"\n db.execute_sql(sql)\n logging.info(\"bomber_auto_call_list_record done\")\n\n\n@action(MessageAction.BOMBER_MANUAL_CALL_LIST)\ndef bomber_manual_call_list(payload, msg_id):\n \"\"\"\n 手动分件主要依赖\n\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n batch_id = payload.get('batch_id')\n if batch_id is None:\n logging.warning('Invalid batch id')\n return\n query = (ManualCallList\n .select()\n .where(ManualCallList.batch_id == batch_id,\n ManualCallList.status << ManualCallListStatus.available()))\n 
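The report built above is mostly element-wise arithmetic over per-cycle dictionaries: for example c3 is the share of overdue loans that entered the predictive-call system, c5 the share of those that were completed, and c10 the connected calls per agent; OperatedDict appears to provide the element-wise operators. A plain-dict stand-in for one of those per-cycle ratios, guarded against empty cycles.

def ratio(numer, denom, per_cent=True):
    # Element-wise per-cycle ratio, a plain-dict stand-in for the OperatedDict division above.
    out = {}
    for cycle, d in denom.items():
        n = numer.get(cycle, 0)
        out[cycle] = round((n / d) * (100 if per_cent else 1), 1) if d else 0
    return out

if __name__ == "__main__":
    entered = {1: 80, 2: 40}
    waiting = {1: 200, 2: 100}
    print(ratio(entered, waiting))  # {1: 40.0, 2: 40.0}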
if not query.exists():\n logging.warning('Empty application id list')\n return\n\n for q in query:\n application_ids = json.loads(q.application_ids or '[]')\n\n # where\n cycle = 0\n where_list = [(Application.id << application_ids),\n Application.latest_bomber_id == q.src_bomber_id]\n src_params = json.loads(q.src_params or '{}')\n if \"cycle\" in src_params:\n where_list.append(Application.cycle == src_params['cycle'])\n cycle = src_params['cycle']\n if \"status\" in src_params:\n where_list.append(Application.status == src_params['status'])\n\n # update\n update_dict = {'latest_bomber': q.dest_bomber_id}\n dest_params = json.loads(q.dest_params or '{}')\n if \"cycle\" in dest_params:\n update_dict['cycle'] = dest_params['cycle']\n cycle = dest_params['cycle']\n if \"status\" in dest_params:\n update_dict['status'] = dest_params['status']\n\n with db.atomic():\n try:\n # update dispatch_app\n if q.update_dispatch_app:\n if q.dest_partner_id is None:\n raise ValueError('unallowed operation')\n (DispatchApp\n .delete()\n .where(DispatchApp.application_id.in_(application_ids))\n .execute())\n\n (DispatchApp\n .insert_many([{\n 'application': i,\n 'partner': q.dest_partner_id,\n 'bomber': q.dest_bomber_id,\n 'status': DisAppStatus.NORMAL.value}\n for i in application_ids])\n .execute())\n \n application_success_row = (\n Application\n .update(**update_dict)\n .where(*where_list)\n .execute()\n )\n if application_success_row == 0:\n raise ValueError('Invalid parameter')\n\n (ManualCallList\n .update(\n status=ManualCallListStatus.SUCCESS.value,\n length=application_success_row)\n .where(ManualCallList.id == q.id)\n .execute())\n\n out_and_in_record(\n src_bomber_id=q.src_bomber_id,\n application_ids=application_ids,\n dest_partner_id=q.dest_partner_id,\n dest_bomber_id=q.dest_bomber_id,\n cycle=cycle\n )\n except Exception:\n db.rollback()\n (ManualCallList\n .update(\n status=ManualCallListStatus.FAILED.value,\n length=0)\n .where(ManualCallList.id == q.id)\n .execute())\n logging.error(\"PRINT BOMBER_MANUAL_CALL_LIST ERROR:\\n%s\",\n traceback.format_exc())\n continue\n\n\ndef lambda_result(item, dct):\n a = str(item.application_id)\n entry_principal_pending = (Decimal(item.amount or 0) -\n dct[a]['principal_paid'])\n entry_late_fee_pending = dct[a]['late_fee'] - dct[a]['late_fee_paid']\n\n return {\n 'created_at': item.created_at,\n 'updated_at': item.updated_at,\n 'application': a,\n 'bomber_id': item.bomber_id,\n 'entry_at': item.entry_at,\n 'entry_overdue_days': item.entry_overdue_days,\n 'partner_id': item.partner_id,\n 'expected_out_time': item.expected_out_time,\n 'entry_principal_pending': entry_principal_pending,\n 'entry_late_fee_pending': entry_late_fee_pending\n }\n\n\ndef out_and_in_record(**kwargs):\n \"\"\"\n 件在催收系统的出案和入案\n \"\"\"\n new_out_record(**kwargs)\n new_in_record(**kwargs)\n\ndef new_out_record(**kwargs):\n if not kwargs['application_ids']:\n return\n (DispatchAppHistory\n .update(out_at=fn.NOW())\n .where(DispatchAppHistory.bomber_id == kwargs['src_bomber_id'],\n DispatchAppHistory.application << kwargs['application_ids'],\n DispatchAppHistory.out_at.is_null(True))\n .execute())\n # 如果是月底分件,ptp_bomber不用置空\n if kwargs.get(\"month_dispatch\"):\n return\n # 出案时下p的件ptp_bomber置为空\n try:\n (Application.update(ptp_bomber=None)\n .where(Application.id << kwargs[\"application_ids\"])\n .execute())\n except Exception as e:\n logging.error(\"new_out_record error:aids:%s,error:%s\" %\n (kwargs[\"application_ids\"],str(e)))\n\ndef new_in_record(**kwargs):\n cycle_period = {\n 
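bomber_manual_call_list builds both its filter and its update dynamically from the JSON src_params and dest_params stored on the batch row: optional cycle/status constraints on the source side, optional cycle/status overrides on the destination side. A compact sketch of that construction with plain dicts instead of peewee expressions; the key names are illustrative.

import json

def build_transfer(src_params_json, dest_params_json, src_bomber_id, dest_bomber_id):
    src = json.loads(src_params_json or '{}')
    dest = json.loads(dest_params_json or '{}')
    filters = {'latest_bomber_id': src_bomber_id}
    if 'cycle' in src:
        filters['cycle'] = src['cycle']
    if 'status' in src:
        filters['status'] = src['status']
    updates = {'latest_bomber': dest_bomber_id}
    if 'cycle' in dest:
        updates['cycle'] = dest['cycle']
    if 'status' in dest:
        updates['status'] = dest['status']
    return filters, updates

if __name__ == "__main__":
    print(build_transfer('{"cycle": 2}', '{"status": 4}', 11, 22))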
1: '10',\n 2: '30',\n 3: '60',\n 4: '90'\n }\n period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')\n kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'\n subquery = (Application\n .select(Application.amount,\n fn.NOW().alias('created_at'),\n fn.NOW().alias('updated_at'),\n Application.id.alias('application_id'),\n R(str(kwargs['dest_bomber_id'])).alias('bomber_id'),\n fn.NOW().alias('entry_at'),\n Application.overdue_days.alias('entry_overdue_days'),\n R(str(kwargs['dest_partner_id'])).alias('partner_id'),\n (SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'\n ' t1.overdue_days) DAY)' % period))\n .alias('expected_out_time'))\n .where(Application.status != ApplicationStatus.REPAID.value,\n Application.id << kwargs['application_ids']))\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n app_ids = [i.application_id for i in applications]\n bill_list = BillService().bill_list(application_ids=app_ids)\n bill_dict = {str(bill['application_id']): bill for bill in bill_list}\n insert_args = list(map(partial(lambda_result,\n dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n\ndef end_old_application(old_app, paid=False):\n if paid:\n if old_app.status == OldLoanStatus.WAITING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return\n if old_app.status == OldLoanStatus.PROCESSING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return old_app.application_id\n\n end_date = old_app.end_date\n now = datetime.now()\n if now >= max(end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n old_app.save()\n return old_app.application_id\n\n\n@action(MessageAction.UPDATE_OLD_LOAN_APPLICATION)\ndef update_old_loan_application(payload, msg_id):\n items = (Application\n .select(Application, OldLoanApplication)\n .join(OldLoanApplication,\n JOIN_INNER,\n on=(Application.id ==\n OldLoanApplication.application_id).alias('old_app'))\n .where(OldLoanApplication.status\n .in_(OldLoanStatus.available())))\n out_list = []\n for application in items:\n if application.overdue_days > 90:\n if application.old_app.status == OldLoanStatus.WAITING.value:\n start_old_application(application.old_app)\n else:\n out_list.append(application.old_app)\n\n success_list = [end_old_application(item) for item in out_list]\n app_ids = list(filter(None, success_list))\n\n if app_ids:\n bomber_id = SpecialBomber.OLD_APP_BOMBER.value\n out_record(src_bomber_id=bomber_id, application_ids=app_ids)\n\n\ndef in_record(**kwargs):\n \"\"\"\n :param kwargs: dist_partner_id, dist_bomber_id,\n expected_out_time, application_ids\n :return:\n \"\"\"\n # TODO: 入案记录统一\n kwargs['dist_partner_id'] = kwargs.get('dist_partner_id') or 'null'\n subquery = (Application\n .select(Application.amount,\n fn.NOW().alias('created_at'),\n fn.NOW().alias('updated_at'),\n Application.id.alias('application_id'),\n R(str(kwargs['dist_bomber_id'])).alias('bomber_id'),\n fn.NOW().alias('entry_at'),\n Application.overdue_days.alias('entry_overdue_days'),\n R(str(kwargs['dist_partner_id'])).alias('partner_id'),\n R('\"{}\"'.format(kwargs['expected_out_time']))\n .alias('expected_out_time'))\n .where(Application.status != ApplicationStatus.REPAID.value,\n Application.id << kwargs['application_ids']))\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n app_ids = 
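The expected_out_time column written by new_in_record comes from the raw SQL expression DATE_ADD(CURDATE(), INTERVAL (period - t1.overdue_days) DAY), where period is looked up per cycle (10/30/60/90) and defaults to '90 + t1.overdue_days'. The same computation in plain Python, to make the date arithmetic explicit.

from datetime import date, timedelta

CYCLE_PERIOD = {1: 10, 2: 30, 3: 60, 4: 90}  # same mapping as cycle_period above

def expected_out_time(cycle, overdue_days, today=None):
    # Python equivalent of DATE_ADD(CURDATE(), INTERVAL (period - t1.overdue_days) DAY).
    today = today or date.today()
    period = CYCLE_PERIOD.get(cycle, 90 + overdue_days)  # fallback mirrors '90 + t1.overdue_days'
    return today + timedelta(days=period - overdue_days)

if __name__ == "__main__":
    print(expected_out_time(cycle=2, overdue_days=12))  # today + 18 days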
[i.application_id for i in applications]\n bill_list = BillService().bill_list(application_ids=app_ids)\n bill_dict = {str(bill['application_id']): bill for bill in bill_list}\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\ndef out_record(**kwargs):\n \"\"\"\n\n :param kwargs: src_bomber_id, application_ids\n :return:\n \"\"\"\n # TODO: 出案记录统一\n if not kwargs.get('application_ids'):\n return\n (DispatchAppHistory\n .update(out_at=fn.NOW())\n .where(DispatchAppHistory.bomber_id == kwargs['src_bomber_id'],\n DispatchAppHistory.application << kwargs['application_ids'])\n .execute())\n # 出案时下p的件ptp_bomber置为空\n try:\n (Application.update(ptp_bomber=None)\n .where(Application.id << kwargs[\"application_ids\"])\n .execute())\n except Exception as e:\n logging.error(\"out_record error:aids:%s,error:%s\" %\n (kwargs[\"application_ids\"], str(e)))\n\n\ndef start_old_application(old_app, cancel=False):\n application_id = old_app.application_id\n if cancel and (old_app.status == OldLoanStatus.PAID.value):\n now = datetime.now()\n if old_app.start_date is None:\n # 未进入500的池子里\n old_app.status = OldLoanStatus.WAITING.value\n elif now >= max(old_app.end_date, old_app.promised_date or now):\n # 撤销时用户已经从500的池子出去\n old_app.status = OldLoanStatus.FINISHED.value\n (DispatchAppHistory\n .update(out_at=max(old_app.end_date,\n old_app.promised_date or now))\n .where(DispatchAppHistory.bomber_id == old_app.bomber_id,\n DispatchAppHistory.application == application_id)\n .execute())\n else:\n # 还在500的池子\n old_app.status = OldLoanStatus.PROCESSING.value\n (DispatchAppHistory\n .update(out_at=None)\n .where(DispatchAppHistory.bomber_id == old_app.bomber_id,\n DispatchAppHistory.application == application_id)\n .execute())\n old_app.save()\n return\n\n application = (\n Application\n .get_or_none(Application.id == application_id,\n Application.status != ApplicationStatus.REPAID.value,\n Application.overdue_days > 90,\n Application.promised_date.is_null(True) |\n (fn.DATE(Application.promised_date) <\n datetime.today().date())))\n if not application:\n logging.error(\"Can not set old application %s to start collecting\",\n application_id)\n return\n\n if old_app.status in OldLoanStatus.no_available():\n logging.info(\"%s has finished or paid\", old_app.application_id)\n return\n\n config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)\n sp = config.get(SCI.OLD_APP_PERIOD,\n SCI.OLD_APP_PERIOD.default_value)\n old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value\n old_app.status = OldLoanStatus.PROCESSING.value\n old_app.bomber_id = old_app_bomber\n old_app.start_date = datetime.now()\n # 此处需要判断end_date是否已经被设置过\n if not old_app.end_date:\n old_app.end_date = datetime.now() + timedelta(days=sp)\n old_app.save()\n in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,\n application_ids=[old_app.application_id],\n expected_out_time=str(old_app.end_date))\n\n\n@action(MessageAction.OLD_LOAN_APPLICATION)\ndef old_loan_application(payload, msg_id):\n application_id = payload.get('application_id')\n numbers = payload.get('numbers', [])\n if not (application_id and numbers):\n logging.error(\"empty application id: %s, or invalid numbers: %s\",\n application_id, numbers)\n\n application = Application.get_or_none(Application.id == application_id)\n if (application and\n application.status == ApplicationStatus.REPAID.value):\n logging.error(\"application %s has paid\", application_id)\n return\n\n gold_eye = 
GoldenEye().get('/applications/%s' % application_id)\n if not gold_eye.ok:\n raise RuntimeError('Get golden eye user failed. {}'\n .format(str(application_id)))\n\n gold_app = gold_eye.json().get('data')\n user_id = gold_app['user_id']\n user_name = gold_app['id_name']\n\n # 通过bill获取账单类型,如果是分期的账单不关联OldloanApplication\n try:\n bill = BillService().bill_dict(application_id=application_id)\n except Exception:\n logging.error(\n 'application %s get bill info failed,old_loan_application',\n application_id)\n return\n\n source_contacts = (Contact\n .filter(Contact.user_id == user_id,\n Contact.relationship ==\n Relationship.APPLICANT.value,\n Contact.source ==\n ApplicantSource.NEW_APPLICANT.value))\n source_contact_set = {i.number for i in source_contacts}\n\n # 如果是分期不做一下操作\n if bill[\"category\"] != ApplicationType.CASH_LOAN_STAGING.value:\n # 获取已有new applicant号码\n old_app = OldLoanApplication.get_or_none(\n OldLoanApplication.application_id == application_id,\n OldLoanApplication.status.in_(OldLoanStatus.available())\n )\n if not old_app:\n old_app = OldLoanApplication.create(application_id=application_id,\n user_id=user_id,\n numbers=','.join(numbers))\n else:\n _numbers = old_app.numbers.split(',')\n # 去重并且删除空号码\n old_app.numbers = ','.join(set([nu for nu in (_numbers + numbers)\n if nu]))\n # 已入催件end_date + 7\n if old_app.status == OldLoanStatus.PROCESSING.value:\n old_app.end_date = old_app.end_date + timedelta(days=7)\n old_app.save()\n\n new_contact = set(numbers) - source_contact_set\n insert_args = [{'user_id': user_id,\n 'name': user_name,\n 'number': i,\n 'relationship': Relationship.APPLICANT.value,\n 'source': ApplicantSource.NEW_APPLICANT.value,\n 'real_relationship': Relationship.APPLICANT.value\n } for i in new_contact]\n if insert_args:\n Contact.insert_many(insert_args).execute()\n if bill[\"category\"] == ApplicationType.CASH_LOAN_STAGING.value:\n return\n start_old_application(old_app)\n\n\ndef run_one_sql(sql):\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n result = cursor.fetchone()[0] / 1000000\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n result = Decimal(0)\n return result\n\n\ndef run_member_sql(sql):\n result = [0, 0]\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n sql_result = cursor.fetchone()\n if sql_result:\n result = sql_result\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n return result\n\n\ndef run_all_sql(sql):\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n result = cursor.fetchall()\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n result = []\n return result\n\n\n# 得到dpd1-3的待催维度recover_rate(废弃)\ndef get_before_bomber(date_time):\n begin_time = str(date_time - timedelta(days=7))\n end_time = str(date_time)\n # 得到每周一已存在的件的待催金额\n old_sql = \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at<date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\" % (begin_time, begin_time)\n old_data = run_one_sql(old_sql)\n\n # 得到每天新达到dpd1的待催件的金额\n new_sql = \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at> '%s' \n and created_at<'%s'\n and overdue_days=1;\n \"\"\" % (begin_time, end_time)\n new_data = run_one_sql(new_sql)\n\n # 计算每天进入dpd4的金额\n dpd4_sql = \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as 
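get_before_bomber's recover rate is simple stock arithmetic: everything due in the window (the existing dpd2/3 stock plus the week's new dpd1 inflow) minus what rolled on to dpd4 and what is still sitting at dpd2/3 counts as repaid, and the proportion is that repayment over the total. A tiny worked sketch of the formula, with amounts already scaled out of micro-units as run_one_sql does by dividing by 1,000,000.

from decimal import Decimal

def before_bomber_rate(old_pending, new_dpd1, dpd4_inflow, dpd2_3_left):
    all_money = old_pending + new_dpd1
    repayment = all_money - dpd4_inflow - dpd2_3_left
    proportion = (repayment / all_money) * 100 if all_money else Decimal(0)
    return all_money, repayment, proportion

if __name__ == "__main__":
    print(before_bomber_rate(Decimal("120"), Decimal("80"), Decimal("30"), Decimal("50")))
    # (Decimal('200'), Decimal('120'), Decimal('60.0'))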
amount\n from \n bill_java.overdue bb\n where \n created_at>date_add('%s',interval 1 day) \n and created_at< date_add('%s',interval 1 day)\n and overdue_days=4;\n \"\"\" % (begin_time, end_time)\n dpd4_data = run_one_sql(dpd4_sql)\n\n # 周一时的dpd2\\3待还\n dpd2_sql = \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at< date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\" % (end_time, end_time)\n dpd2_data = run_one_sql(dpd2_sql)\n\n all_money = old_data + new_data\n repayment = all_money - dpd4_data - dpd2_data\n pro = 0\n if all_money:\n pro = (repayment / all_money) * 100\n RepaymentReport.create(\n time=begin_time,\n cycle=0,\n all_money=all_money,\n proportion=pro,\n repayment=repayment\n )\n\n\n# 每周刷新一次recover_rate报表数据(待催维度)\n@action(MessageAction.RECOVER_RATE_WEEK_MONEY)\ndef recover_rate_week_money(payload, msg_id):\n #获取当天RECOVER_RATE_WEEK_MONEY日志次数\n worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))\n .where(WorkerLog.created_at >= date.today(),\n WorkerLog.action == 'RECOVER_RATE_WEEK_MONEY')\n .first())\n\n if worker_log.logs >= 5:\n return\n logging.info('start cal recover_rate_week_money')\n date_time = date.today()\n get_every_cycle_report(date_time)\n\n\n# 得到入催維度的dpd1-3的recover_rate\ndef get_before_bomber_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n for i in range(2, 5):\n money_sql = \"\"\"\n select \n sum(bo1.principal_pending+bo1.late_fee_pending+\n bo1.interest_pending) as dpd1_pending, \n sum(bo2.principal_pending+bo2.late_fee_pending+\n bo2.interest_pending) as dpd4_pending\n from bill_java.overdue bo1\n left join dashboard.application da \n on bo1.application_id=da.id \n left join bill_java.overdue bo2 \n on bo1.application_id=bo2.application_id \n and bo2.overdue_days=%s and bo2.status = 1\n where bo1.overdue_days=1 \n and bo1.status = 1\n and bo1.which_day_overdue>='%s' \n and bo1.which_day_overdue<'%s'\n and da.is_first_loan = %s\n and bo1.stage_num is null\n \"\"\" % (i, begin_date, end_date, is_first_loan)\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(money_sql)\n money = cursor.fetchone()\n all_money = money[0] / 1000000\n dpd4_money = money[1] / 1000000\n except Exception as e:\n logging.info('get all_money error: %s' % str(e))\n all_money = 0\n dpd4_money = 0\n\n repayment = all_money - dpd4_money\n if begin_date == date_time - timedelta(days=1):\n RepaymentReportInto.create(\n time=begin_date,\n cycle=0,\n all_money=round(all_money, 3),\n proportion='0',\n repayment=round(repayment, 3),\n is_first_loan=is_first_loan,\n contain_out=ContainOut.CONTAIN.value\n )\n else:\n pro = '0'\n if all_money:\n pro = (repayment / all_money) * 100\n pro = str(round(pro, 2))\n RepaymentReportInto.update(\n repayment=round(repayment, 3),\n proportion=pro\n ).where(\n RepaymentReportInto.time == begin_date,\n RepaymentReportInto.cycle == 0,\n RepaymentReportInto.is_first_loan == is_first_loan\n ).execute()\n\n end_date = begin_date\n begin_date = begin_date - timedelta(days=1)\n\n\n# 得到c1a入催维度的recover_rate\ndef get_c1a_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n 
o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\" % (begin_date, end_date, is_first_loan)\n all_money = run_one_sql(all_money_sql)\n\n begin_date = date_time - timedelta(days=19)\n repayment_sql = \"\"\"\n select \n sum(b.principal_part+b.late_fee_part) as paid_amount, \n cdt\n from \n (select \n br.principal_part, br.late_fee_part, \n date(cdt) as cdt, br.repay_at, br.application_id\n from (\n select ba.id, ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)\n group by 4, 5) b\n group by 2\n \"\"\" % (begin_date, end_date, is_first_loan)\n repayment = run_all_sql(repayment_sql)\n\n if not repayment:\n return\n RepaymentReportInto.create(\n time=end_date - timedelta(days=1),\n cycle=Cycle.C1A.value,\n all_money=round(all_money, 3),\n proportion='0',\n repayment=0,\n is_first_loan=is_first_loan,\n contain_out=ContainOut.CONTAIN.value\n )\n\n for d in repayment:\n repay = d[0] / 1000000\n report = RepaymentReportInto.filter(\n RepaymentReportInto.time == d[1],\n RepaymentReportInto.cycle == Cycle.C1A.value,\n RepaymentReportInto.is_first_loan == is_first_loan\n ).first()\n if report:\n report.repayment = round(repay, 3)\n pro = (repay / report.all_money) * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n# 得到c1b入催维度的recover_rate\ndef get_c1b_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,c1b_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c1b_entry >= '%s'\n and ba.c1b_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\" % (begin_date, end_date, is_first_loan)\n all_money = run_one_sql(all_money_sql)\n\n not_contain_sql = \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,c1b_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c1b_entry >= '%s'\n and ba.c1b_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id and bd.partner_id=5)\n ) a\n inner join bill_java.overdue o on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\" % (begin_date, end_date, is_first_loan)\n not_contain_money = run_one_sql(not_contain_sql)\n\n begin_date = date_time - timedelta(days=22)\n repayment_sql = \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount,et\n from \n (select br.principal_part, 
br.late_fee_part,\n date(a.c1b_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c1b_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c1b_entry >= '%s'\n and ba.c1b_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 2\n group by 4, 5) b\n group by 2;\n \"\"\" % (begin_date, end_date, is_first_loan)\n repayment = run_all_sql(repayment_sql)\n\n not_contain_repay_sql = \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c1b_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c1b_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c1b_entry >= '%s'\n and ba.c1b_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=5)\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 2\n group by 4, 5) b\n group by 2\n \"\"\" % (begin_date, end_date, is_first_loan)\n not_contain_repay = run_all_sql(not_contain_repay_sql)\n\n if not not_contain_repay and not repayment:\n return\n for i in ContainOut.values():\n if i == ContainOut.NOT_CONTAIN.value:\n RepaymentReportInto.create(\n time=end_date - timedelta(days=1),\n cycle=Cycle.C1B.value,\n all_money=round(not_contain_money, 3),\n proportion='0',\n repayment=0,\n is_first_loan=is_first_loan,\n contain_out=ContainOut.NOT_CONTAIN.value\n )\n for repay in not_contain_repay:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n\n report = RepaymentReportInto.filter(\n RepaymentReportInto.time == repay[1],\n RepaymentReportInto.is_first_loan == is_first_loan,\n RepaymentReportInto.contain_out == i,\n RepaymentReportInto.cycle == Cycle.C1B.value\n ).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = (repay_money / report.all_money) * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n elif i == ContainOut.CONTAIN.value:\n RepaymentReportInto.create(\n time=end_date - timedelta(days=1),\n cycle=Cycle.C1B.value,\n all_money=round(all_money, 3),\n proportion='0',\n repayment=0,\n is_first_loan=is_first_loan,\n contain_out=ContainOut.CONTAIN.value\n )\n for repay in repayment:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n\n report = RepaymentReportInto.filter(\n RepaymentReportInto.time == repay[1],\n RepaymentReportInto.is_first_loan == is_first_loan,\n RepaymentReportInto.contain_out == i,\n RepaymentReportInto.cycle == Cycle.C1B.value\n ).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = (repay_money / report.all_money) * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n# 得到c2入催维度的recover_rate\ndef get_c2_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n 
inner join bill_java.overdue o \n on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\" % (begin_date, end_date, is_first_loan)\n all_money = run_one_sql(all_money_sql)\n\n not_contain_sql = \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\" % (begin_date, end_date, is_first_loan)\n not_contain_money = run_one_sql(not_contain_sql)\n\n begin_date = date_time - timedelta(days=37)\n repayment_sql = \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\" % (begin_date, end_date, is_first_loan)\n repayment = run_all_sql(repayment_sql)\n\n not_contain_repay_sql = \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\" % (begin_date, end_date, is_first_loan)\n not_contain_repay = run_all_sql(not_contain_repay_sql)\n\n if not not_contain_money and repayment:\n return\n for i in ContainOut.values():\n if i == ContainOut.NOT_CONTAIN.value:\n RepaymentReportInto.create(\n time=end_date - timedelta(days=1),\n cycle=Cycle.C2.value,\n all_money=round(not_contain_money, 3),\n proportion='0',\n repayment=0,\n is_first_loan=is_first_loan,\n contain_out=ContainOut.NOT_CONTAIN.value\n )\n for repay in not_contain_repay:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(\n RepaymentReportInto.time == repay[1],\n RepaymentReportInto.is_first_loan == is_first_loan,\n RepaymentReportInto.contain_out == i,\n RepaymentReportInto.cycle == Cycle.C2.value\n ).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = (repay_money / report.all_money) * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n elif i == ContainOut.CONTAIN.value:\n RepaymentReportInto.create(\n time=end_date - timedelta(days=1),\n cycle=Cycle.C2.value,\n all_money=round(all_money, 3),\n proportion='0',\n repayment=0,\n is_first_loan=is_first_loan,\n contain_out=ContainOut.CONTAIN.value\n )\n for repay in repayment:\n repay_money = 
0\n if repay[0]:\n repay_money = repay[0] / 1000000\n report = RepaymentReportInto.filter(\n RepaymentReportInto.time == repay[1],\n RepaymentReportInto.is_first_loan == is_first_loan,\n RepaymentReportInto.contain_out == i,\n RepaymentReportInto.cycle == Cycle.C2.value\n ).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = (repay_money / report.all_money) * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n# 得到c2入催维度的recover_rate\ndef get_c3_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id, ba.c3_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\" % (begin_date, end_date, is_first_loan)\n all_money = run_one_sql(all_money_sql)\n\n begin_date = date_time - timedelta(days=30)\n repayment_sql = \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c3_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c3_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = '%s'\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 4\n group by 4, 5) b\n group by 2\n \"\"\" % (begin_date, end_date, is_first_loan)\n repayment = run_all_sql(repayment_sql)\n\n RepaymentReportInto.create(\n time=end_date - timedelta(days=1),\n cycle=Cycle.C3.value,\n all_money=round(all_money, 3),\n proportion='0',\n repayment=0,\n is_first_loan=is_first_loan,\n contain_out=ContainOut.CONTAIN.value\n )\n if not repayment:\n return\n for repay in repayment:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(\n RepaymentReportInto.time == repay[1],\n RepaymentReportInto.cycle == Cycle.C3.value,\n RepaymentReportInto.is_first_loan == is_first_loan\n ).first()\n if report:\n report.repayment = repay_money\n pro = 0\n if report.all_money and int(report.all_money):\n pro = (repay_money / report.all_money) * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n# 每天刷新一次recover_rate报表数据(入催维度)\n@action(MessageAction.RECOVER_RATE_WEEK_MONEY_INTO)\ndef recover_rate_week_money_into(payload, msg_id):\n worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))\n .where(WorkerLog.created_at >= date.today(),\n WorkerLog.action == 'RECOVER_RATE_WEEK_MONEY_INTO')\n .first())\n\n if worker_log and worker_log.logs >= 5:\n return\n date_time = date.today()\n get_before_bomber_rate(date_time)\n get_c1a_into_rate(date_time)\n get_c1b_into_rate(date_time)\n get_c2_into_rate(date_time)\n get_c3_into_rate(date_time)\n\n # 将已经成熟的数据从未成熟改为成熟\n ripe_days = {0: 3, 1: 7, 2: 20, 3: 30, 4: 30}\n for i in range(0, 5):\n repe_date = date.today() - timedelta(days=ripe_days[i])\n (RepaymentReportInto\n .update(ripe_ind=RipeInd.RIPE.value)\n .where(RepaymentReportInto.time < repe_date,\n 
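recover_rate_week_money_into finishes by marking rows as mature: a report row is flipped to RIPE once its observation window has fully elapsed, using a per-cycle ripening period (3/7/20/30/30 days for cycles 0-4). A one-function sketch of that maturity check on a plain dict row.

from datetime import date, timedelta

RIPE_DAYS = {0: 3, 1: 7, 2: 20, 3: 30, 4: 30}  # same mapping as ripe_days above

def is_ripe(row, today=None):
    # A row is mature once its date is older than today minus the cycle-specific ripening period.
    today = today or date.today()
    return row["time"] < today - timedelta(days=RIPE_DAYS[row["cycle"]])

if __name__ == "__main__":
    row = {"time": date.today() - timedelta(days=10), "cycle": 1}
    print(is_ripe(row))  # True: older than the 7-day window for cycle 1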
RepaymentReportInto.cycle == i)\n ).execute()\n\n\n# ----------------- 计算summary_bomber中原summary存在的指标 --------------------\n# 得到基础数据\ndef get_static_bomber(begin_date):\n active_date = begin_date - timedelta(days=8)\n bombers = (BomberR\n .select(BomberR.id,\n BomberR.role.alias('role'),\n BomberR.last_active_at.alias('active'))\n .where(BomberR.last_active_at > active_date,\n BomberR.role << [1, 2, 4, 5, 6, 8,9]))\n summary = []\n for bomber in bombers:\n summary.append({\n 'time': begin_date,\n 'bomber_id': bomber.id,\n 'cycle': bomber.role.cycle,\n 'work_ind': 0\n })\n SummaryBomber.insert_many(summary).execute()\n\n\n# 部分指标须在当天晚上计算完成\n@action(MessageAction.SUMMARY_CREATE)\ndef summary_create(payload, msg_id):\n begin_date = date.today()\n worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))\n .where(WorkerLog.created_at >= begin_date,\n WorkerLog.action == 'SUMMARY_CREATE')\n .first())\n\n if worker_log and worker_log.logs >= 5:\n return\n\n get_static_bomber(begin_date)\n\n\n# 得到当天工作的员工\ndef get_active_bomber(begin_date):\n bombers = (BomberR\n .select(BomberR.id)\n .where(BomberR.last_active_at >= begin_date))\n for bomber in bombers:\n (SummaryBomber.update(work_ind=1)\n .where(SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == bomber.id)\n ).execute()\n\n\n# 得到每个催收员每天拨打电话数和拨打件数\n@time_logger\ndef get_call_and_made(end_date, begin_date, real_time_query=False):\n call_sql = \"\"\"\n select \n bomber_id, \n count(case when relationship is not null then application_id end) \n as 'call_cnt', \n count(distinct case when relationship is not null then \n application_id end) as 'call_case',\n count(case when phone_status=4 then application_id end) as 'connect',\n count(distinct case when phone_status=4 then application_id end) \n as 'connect_case'\n from (\n select bomber_id,application_id,phone_status, cycle, relationship\n from bomber.call_actions ba\n where created_at>'%s' and created_at<'%s'\n and type in (0, 1)\n ) a\n group by 1\n \"\"\" % (begin_date, end_date)\n calls = run_all_sql(call_sql)\n if real_time_query:\n return calls\n for call in calls:\n bomber, call_cnt, case_made, connect_cnt, case_connect = call\n (SummaryBomber.update(\n case_made_cnt=case_made,\n call_cnt=call_cnt,\n call_connect_cnt=connect_cnt,\n case_connect_cnt=case_connect)\n .where(\n SummaryBomber.bomber_id == bomber,\n SummaryBomber.time == begin_date)\n ).execute()\n return calls\n\n\n# 得到每个催收员每天待催件数\n@time_logger\ndef get_claimed_cnt(end_date, begin_date, real_time_query=False):\n table_date = begin_date - timedelta(days=30)\n claimed_sql = \"\"\"\n SELECT\n COUNT( `t1`.`application_id` ) AS cnt,\n `t1`.`bomber_id` AS bomber_id \n FROM\n `dispatch_app_history` AS t1 \n WHERE\n ( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null ) \n AND ( `t1`.`bomber_id` != 1000 ) \n AND ( `t1`.`partner_id` IS null ) \n AND ( `t1`.`entry_at` > '%s' ) \n AND ( `t1`.`entry_at` < '%s' ) \n GROUP BY\n `t1`.`bomber_id`\n \"\"\" % (begin_date, table_date, end_date)\n claimeds = run_all_sql(claimed_sql)\n if real_time_query:\n return claimeds\n for claimed in claimeds:\n cnt, bomber_id = claimed\n (SummaryBomber.update(claimed_cnt=cnt)\n .where(SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == bomber_id)\n ).execute()\n return claimeds\n\n\n# 得到短信相关数据\ndef get_sms_data(end_data, begin_data):\n all_sms = (ConnectHistoryR\n .select(ConnectHistoryR.operator.alias('bomber_id'),\n fn.COUNT(ConnectHistoryR.application).alias('sms_send'))\n .where(ConnectHistoryR.created_at > begin_data,\n 
ConnectHistoryR.created_at < end_data,\n ConnectHistoryR.type.in_(ConnectType.sms()))\n .group_by(ConnectHistoryR.operator))\n\n for sms in all_sms:\n (SummaryBomber.update(sms_cnt=sms.sms_send)\n .where(SummaryBomber.time == begin_data,\n SummaryBomber.bomber_id == sms.bomber_id)\n ).execute()\n return all_sms\n\n\n# 得到ptp相关的数据\n@time_logger\ndef get_ptp_data(end_date, begin_date, real_query_time=False):\n sql = \"\"\"\n SELECT\n a.bomber_id,\n sum( a.promised_amount ) AS ptp_amount,\n count( application_id ) \n FROM\n bomber.auto_call_actions a\n LEFT JOIN bomber.bomber c ON a.bomber_id = c.id \n WHERE\n a.created_at >= '%s' \n AND a.created_at < '%s'\n AND a.promised_date != '' \n GROUP BY 1 \n UNION\n SELECT\n a.bomber_id,\n ifnull( sum( a.promised_amount ), 0 ) AS ptp_amount,\n count( application_id ) \n FROM\n bomber.bombing_history a\n LEFT JOIN bomber.bomber c ON a.bomber_id = c.id \n WHERE\n bomber_id NOT BETWEEN 151 \n AND 177 \n AND bomber_id NOT BETWEEN 181 \n AND 183 \n AND bomber_id != 72 \n AND a.created_at >= '%s' \n AND a.created_at < '%s' \n AND a.promised_date != '' \n GROUP BY 1\n \"\"\" % (begin_date, end_date, begin_date, end_date)\n ptp_datas = run_all_sql(sql)\n if real_query_time:\n return ptp_datas\n\n result = {}\n for ptp in ptp_datas:\n bomber_id, amount, cnt = ptp\n if bomber_id in result.keys():\n result[bomber_id][0] += amount\n result[bomber_id][1] += cnt\n continue\n result[bomber_id] = [amount, cnt]\n for key, value in result.items():\n (SummaryBomber\n .update(\n promised_cnt=value[1],\n promised_amount=value[0]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == key\n )).execute()\n return ptp_datas\n\n\n# 统计回款金额和回款件数\n@time_logger\ndef get_recover_amount(end_date, begin_date, real_time_query=False):\n C1_sql = \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,\n count(distinct application_id)\n from \n (select a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,4)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\" % (begin_date, end_date)\n C1_results = run_all_sql(C1_sql)\n if not real_time_query:\n for C1_result in C1_results:\n bomber_id, amount, cnt = C1_result\n (SummaryBomber.update(\n cleared_cnt=cnt,\n cleared_amount=amount\n ).where(\n SummaryBomber.bomber_id == bomber_id,\n SummaryBomber.time == begin_date\n )).execute()\n\n other_sql = \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,\n count(distinct application_id)\n from (\n select application_id,current_bomber_id,pay_amount,repay_at\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (2,3,5,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n group by 1,4) b\n group by 1\n \"\"\" % (begin_date, end_date)\n sql_results = 
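get_ptp_data unions promises from the auto-call actions and the manual bombing history, so the same bomber can appear in both result sets; the result dict then folds the rows together by summing amount and count per bomber before writing SummaryBomber. The merge step in isolation.

def merge_ptp_rows(rows):
    # rows: (bomber_id, promised_amount, promised_cnt) tuples from the two UNIONed queries above.
    merged = {}
    for bomber_id, amount, cnt in rows:
        if bomber_id in merged:
            merged[bomber_id][0] += amount
            merged[bomber_id][1] += cnt
        else:
            merged[bomber_id] = [amount, cnt]
    return merged

if __name__ == "__main__":
    rows = [(7, 100.0, 2), (8, 50.0, 1), (7, 25.0, 1)]
    print(merge_ptp_rows(rows))  # {7: [125.0, 3], 8: [50.0, 1]}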
run_all_sql(other_sql)\n if not real_time_query:\n for sql_result in sql_results:\n bomber_id, amount, cnt = sql_result\n (SummaryBomber.update(\n cleared_cnt=cnt,\n cleared_amount=amount\n ).where(\n SummaryBomber.bomber_id == bomber_id,\n SummaryBomber.time == begin_date\n )).execute()\n result = sql_results + C1_results\n return result\n\n\n# summary 报表新数据(分布计算,先计算一部分数据)\n@action(MessageAction.SUMMARY_NEW)\ndef summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))\n .where(WorkerLog.created_at >= end_date,\n WorkerLog.action == 'SUMMARY_NEW')\n .first())\n\n if worker_log and worker_log.logs >= 5:\n return\n\n get_active_bomber(begin_date)\n get_call_and_made(end_date, begin_date)\n get_claimed_cnt(end_date, begin_date)\n get_sms_data(end_date, begin_date)\n get_ptp_data(end_date, begin_date)\n get_recover_amount(end_date, begin_date)\n get_unfollowed(begin_date)\n get_unfollowed_call(begin_date)\n\n\n# ------------------------ 计算summary bomber的另部分指标 ----------------------\n# 得到新件件数和金额\ndef get_new_case_amount(begin_date, end_date):\n all_case = (DispatchAppHistoryR\n .select(fn.SUM(DispatchAppHistoryR.entry_late_fee_pending +\n DispatchAppHistoryR.entry_principal_pending)\n .alias('pending'),\n DispatchAppHistoryR.bomber_id,\n fn.COUNT(DispatchAppHistoryR.application).alias('cnt'))\n .where(DispatchAppHistoryR.entry_at > begin_date,\n DispatchAppHistoryR.entry_at < end_date,\n DispatchAppHistoryR.partner_id.is_null(True))\n .group_by(DispatchAppHistoryR.bomber_id))\n for case in all_case:\n SummaryBomber.update(\n new_case_amount_sum=case.pending,\n new_case_cnt=case.cnt\n ).where(\n SummaryBomber.bomber_id == case.bomber_id,\n SummaryBomber.time == begin_date\n ).execute()\n return all_case\n\n\n# 得到KP相关数据\ndef get_kp_cleared(begin_date, end_date):\n auto_call_sql = \"\"\"\n SELECT\n a.current_bomber_id, count( b.application_id ) \n FROM\n (SELECT\n current_bomber_id, principal_part, late_fee_part,\n repay_at, application_id \n FROM\n bomber.repayment_log \n WHERE\n repay_at >= '%s' \n AND repay_at < '%s' \n GROUP BY 4, 5 ) a\n LEFT JOIN (\n SELECT\n cycle, bomber_id, promised_amount, promised_date,\n application_id, created_at \n FROM\n bomber.auto_call_actions \n WHERE\n created_at >= date_sub( '%s', INTERVAL 7 DAY ) \n AND created_at < '%s' \n AND promised_date IS NOT NULL \n ) b ON a.current_bomber_id = b.bomber_id \n AND a.application_id = b.application_id \n AND date( a.repay_at ) <= date( b.promised_date ) \n AND date( a.repay_at ) >= date( b.created_at )\n LEFT JOIN bomber.bomber c ON a.current_bomber_id = c.id \n WHERE\n b.promised_date >= '%s'\n GROUP BY 1\n \"\"\" % (begin_date, end_date, begin_date, end_date, begin_date)\n auto_call_results = run_all_sql(auto_call_sql)\n\n manual_sql = \"\"\"\n SELECT\n a.current_bomber_id, count( b.application_id ) \n FROM\n (SELECT\n current_bomber_id, principal_part, late_fee_part,\n repay_at, application_id, created_at \n FROM\n bomber.repayment_log \n WHERE\n repay_at >= '%s' \n AND repay_at < '%s' \n AND principal_part + late_fee_part > 0 \n GROUP BY 2, 5 ) a\n LEFT JOIN (\n SELECT\n cycle, bomber_id, promised_amount, promised_date, \n application_id, created_at\t\n FROM\n bomber.bombing_history \n WHERE\n created_at >= date_sub( '%s', INTERVAL 7 DAY ) \n AND created_at < '%s' \n AND promised_date IS NOT NULL \n ) b ON a.current_bomber_id = b.bomber_id \n AND a.application_id = b.application_id \n AND date( 
a.repay_at ) <= date( b.promised_date ) \n AND date( a.repay_at ) >= date( b.created_at )\n LEFT JOIN bomber.bomber c ON a.current_bomber_id = c.id \n WHERE\n b.promised_date >= '%s'\n GROUP BY 1\n \"\"\" % (begin_date, end_date, begin_date, end_date, begin_date)\n manual_results = run_all_sql(manual_sql)\n\n sql_result = auto_call_results + manual_results\n result = {}\n for data in sql_result:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n for key, value in result.items():\n (SummaryBomber\n .update(\n KP_cleared_cnt=value\n ).where(\n SummaryBomber.bomber_id == key,\n SummaryBomber.time == begin_date)\n ).execute()\n\n\n# 得到当天处于ptp的件(KP率的分母)\ndef get_kp_today(begin_date, end_date):\n sql = \"\"\"\n select bomber_id, count(distinct application_id)\n from( \n SELECT bomber_id, application_id\n FROM bomber.auto_call_actions a\n WHERE promised_date >= '%s' AND created_at < '%s' \n AND EXISTS(select 1 from bomber.application ba \n where a.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))\n UNION \n SELECT bomber_id, application_id\n FROM bomber.bombing_history b\n WHERE promised_date >= '%s' AND created_at < '%s'\n AND EXISTS(select 1 from bomber.application ba \n where b.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))) result\n GROUP BY 1\n \"\"\" % (begin_date, end_date, begin_date, begin_date, end_date, begin_date)\n kp_today = run_all_sql(sql)\n\n for kp in kp_today:\n (SummaryBomber.update(\n KP_today_cnt=kp[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == kp[0]\n )).execute()\n\n\n# 得到ptp相关信息(当日ptp到期件数、次日到期件数)\ndef get_ptp_cnt(begin_date, end_date):\n today_due = []\n for sql_date in (begin_date, end_date):\n sql = \"\"\"\n select bomber_id,count(distinct application_id) as cnt from \n ( # 自动外呼中排除掉已经修改P期的件\n select application_id,bomber_id,created_at \n from bomber.auto_call_actions ba \n where promised_date ='%s' # 需要过滤掉在手动中续P的\n and not exists ( select 1 from bomber.bombing_history bb \n where bb.application_id = ba.application_id \n and bb.bomber_id = ba.bomber_id \n and bb.created_at>ba.created_at\n and bb.promised_date is not null \n and bb.created_at < '%s')\n union #历史记录,排除因为续P,导致这个件不在当日的P中\n select b.application_id,b.bomber_id,a.cdt\n from bomber.bombing_history b\n inner join (\n select application_id,bomber_id,max(created_at) as cdt \n from bomber.bombing_history bb\n where bb.created_at>date_sub('%s',interval 7 day)\n and bb.created_at<'%s'\n and promised_date is not null\n group by 1,2) a \n on b.application_id=a.application_id \n and b.bomber_id=a.bomber_id and a.cdt=b.created_at\n where b.promised_date ='%s'\n union #当天下的当天的P\n select b.application_id,b.bomber_id,b.created_at\n from bomber.bombing_history b\n where b.promised_date ='%s'\n and b.created_at>'%s'\n and b.created_at<date_add('%s',interval 1 day)\n ) a\n where exists(select 1 from bomber.application ba \n where ba.id=a.application_id \n and ((ba.finished_at is null) \n or (ba.finished_at > '%s')))\n group by 1\n \"\"\" % (sql_date, begin_date, begin_date, begin_date, sql_date,\n sql_date, begin_date, begin_date, begin_date)\n datas = run_all_sql(sql)\n\n if sql_date == begin_date:\n today_due = datas\n for data in datas:\n (SummaryBomber.update(\n ptp_today_cnt=data[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == data[0]\n )).execute()\n continue\n nextday_due = datas\n for data in datas:\n (SummaryBomber.update(\n 
ptp_next_cnt=data[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == data[0]\n )).execute()\n return [today_due, nextday_due]\n\n\n# 得到ptp维护的相关信息\ndef get_ptp_call_cnt(begin_date, end_date):\n today_followed = []\n for sql_data in (begin_date, end_date):\n sql = \"\"\"\n select b.bomber_id,count(distinct b.application_id) as cnt \n from (\n select a.* from \n (\n select application_id,bomber_id,created_at \n from bomber.auto_call_actions ba \n where promised_date ='%s' # 需要过滤掉在手动中续P的\n and not exists (select 1 from bomber.bombing_history bb \n where bb.application_id = ba.application_id \n and bb.bomber_id = ba.bomber_id \n and bb.created_at>ba.created_at \n and bb.promised_date is not null \n and bb.created_at < '%s')\n union #历史记录,排除因为续P,导致这个件不在当日的P中\n select b.application_id,b.bomber_id,a.cdt\n from bomber.bombing_history b\n inner join (\n select application_id,bomber_id,max(created_at) as cdt \n from bomber.bombing_history bb\n where bb.created_at>date_sub('%s',interval 7 day)\n and bb.created_at<'%s'\n and promised_date is not null\n group by 1,2) a \n on b.application_id=a.application_id \n and b.bomber_id=a.bomber_id and a.cdt=b.created_at\n where b.promised_date ='%s'\n union #当天下的当天的P\n select b.application_id,b.bomber_id,b.created_at\n from bomber.bombing_history b\n where b.promised_date ='%s'\n and b.created_at>'%s'\n and b.created_at<date_add('%s',interval 1 day)\n ) a\n where exists(select 1 from bomber.application ba \n where ba.id=a.application_id \n and ((ba.finished_at is null) \n or (ba.finished_at > '%s')))\n and exists(select 1 from bomber.call_actions bc \n where a.application_id = bc.application_id \n and a.bomber_id = bc.bomber_id \n and bc.created_at>'%s' \n and bc.created_at< date_add('%s',interval 1 day) \n and bc.created_at>=a.created_at)\n union \n select a.* from \n (\n select application_id,bomber_id,created_at \n from bomber.auto_call_actions ba \n where promised_date ='%s' # 需要过滤掉在手动中续P的\n and not exists ( select 1 from bomber.bombing_history bb \n where bb.application_id = ba.application_id \n and bb.bomber_id = ba.bomber_id \n and bb.created_at>ba.created_at \n and bb.promised_date is not null \n and bb.created_at < '%s')\n union #历史记录,排除因为续P,导致这个件不在当日的P中\n select b.application_id,b.bomber_id,a.cdt\n from bomber.bombing_history b\n inner join (\n select application_id,bomber_id,max(created_at) as cdt \n from bomber.bombing_history bb\n where bb.created_at>date_sub('%s',interval 7 day)\n and bb.created_at<'%s'\n and promised_date is not null\n group by 1,2) a \n on b.application_id=a.application_id \n and b.bomber_id=a.bomber_id and a.cdt=b.created_at\n where b.promised_date ='%s'\n union #当天下的当天的P\n select b.application_id,b.bomber_id,b.created_at\n from bomber.bombing_history b\n where b.promised_date ='%s'\n and b.created_at>'%s'\n and b.created_at<date_add('%s',interval 1 day)\n ) a\n where exists(select 1 from bomber.application ba \n where ba.id=a.application_id \n and ba.finished_at > '%s' \n and ba.finished_at< date_add('%s',interval 1 day))\n ) b\n group by 1\n \"\"\" % (sql_data, begin_date, begin_date, begin_date, sql_data,\n sql_data, begin_date, begin_date, begin_date, begin_date,\n begin_date, sql_data, begin_date, begin_date, begin_date,\n sql_data, sql_data, begin_date, begin_date, begin_date,\n begin_date)\n datas = run_all_sql(sql)\n\n if sql_data == begin_date:\n today_followed = datas\n for data in datas:\n (SummaryBomber.update(\n ptp_today_call_cnt=data[1]\n ).where(\n SummaryBomber.bomber_id == data[0],\n 
SummaryBomber.time == begin_date\n )).execute()\n continue\n nextday_followed = datas\n for data in datas:\n (SummaryBomber.update(\n ptp_next_call_cnt=data[1]\n ).where(\n SummaryBomber.bomber_id == data[0],\n SummaryBomber.time == begin_date\n )).execute()\n return [today_followed, nextday_followed]\n\n\n# 得到新件还款金额(只有c2、c3才有新件还款的概念)\ndef get_new_case_cleared(begin_date, end_date):\n sql = \"\"\"\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c2_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c2_entry > '%s' \n AND ba.c2_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c3_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c3_entry > '%s' \n AND ba.c3_entry < '%s' \n ) a\n GROUP BY 1\n \"\"\" % (begin_date, end_date, begin_date, end_date,begin_date, end_date)\n case_cleared_sums = run_all_sql(sql)\n\n for clear in case_cleared_sums:\n (SummaryBomber.update(\n new_case_cleared_sum=clear[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == clear[0]\n )).execute()\n\n\n# 新件当日维护件数\n@time_logger\ndef get_new_case_call(begin_date, end_date, real_query_time=False):\n sql = \"\"\"\n SELECT\n bd.bomber_id,\n count( DISTINCT bd.application_id )\n FROM\n bomber.dispatch_app_history bd\n INNER JOIN bomber.call_actions bc \n ON bd.application_id = bc.application_id \n AND bd.bomber_id = bc.bomber_id \n AND date( bd.entry_at ) = date( bc.created_at ) \n WHERE\n entry_at > '%s' \n AND entry_at < '%s' \n AND partner_id IS NULL \n GROUP BY 1\n \"\"\" % (begin_date, end_date)\n new_case_calls = run_all_sql(sql)\n\n if real_query_time:\n return new_case_calls\n\n for call in new_case_calls:\n (SummaryBomber.update(\n new_case_call_cnt=call[1]\n ).where(\n SummaryBomber.bomber_id == call[0],\n SummaryBomber.time == begin_date\n )).execute()\n return new_case_calls\n\n\n# 得到接通件均通话时长\n@time_logger\ndef get_calltime_avg(begin_date, end_date, real_query_time=False):\n autos_sql = \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction,\n count( 1 ) AS auto_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' ' \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\" % (begin_date, end_date)\n autos = run_all_sql(autos_sql)\n\n manual_sql = \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND 
an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n AND an.recording is not null\n GROUP BY 1\n \"\"\" % (begin_date, end_date, '5%', '3%')\n manuals = run_all_sql(manual_sql)\n\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]][0] += data[1]\n result[data[0]][1] += data[2]\n continue\n result[data[0]] = [data[1], data[2]]\n\n if real_query_time:\n return result\n\n for key, value in result.items():\n (SummaryBomber.update(\n calltime_case_sum=value[0],\n calltime_case_cnt=value[1],\n calltime_case_avg=value[0] / value[1] if value[1] else 0\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == key\n )).execute()\n return result\n\n\n# 得到等待时长相关数据\ndef get_no_calltime_avg(begin_date, end_date):\n manual_sql = \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND (an.status!='ANSWERED' or an.recording is null) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\" % (begin_date, end_date, '5%', '3%')\n manuals = run_all_sql(manual_sql)\n\n for data in manuals:\n (SummaryBomber.update(\n calltime_no_case_sum=data[1],\n calltime_no_case_cnt=data[2],\n calltime_no_case_avg=data[1] / data[2] if data[2] else 0\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == data[0]\n )).execute()\n\n\n# 得到通话总时长\n@time_logger\ndef get_calltime_sum(begin_date, end_date, real_query_time=False):\n autos_sql = \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' '\n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\" % (begin_date, end_date)\n autos = run_all_sql(autos_sql)\n\n manual_sql = \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\" % (begin_date, end_date, '5%', '3%')\n manuals = run_all_sql(manual_sql)\n\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n if real_query_time:\n return result\n for key, value in result.items():\n (SummaryBomber.update(\n calltime_sum=value\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == key\n )).execute()\n return result\n\n\n# 当天未跟进的件\ndef get_unfollowed(begin_date):\n sql = \"\"\"\n SELECT\n bomber_id,\n count(1)\n FROM\n (\n SELECT\n bd.application_id,\n date(bd.entry_at) AS entry_at,\n bd.bomber_id,\n date(bd.out_at) AS out_at\n FROM\n bomber.dispatch_app_history bd\n WHERE\n (\n out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)\n OR out_at IS NULL\n )\n AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)\n AND partner_id IS NULL\n AND NOT EXISTS (\n SELECT\n 1\n FROM\n bomber.call_actions bc\n WHERE\n bd.bomber_id = bc.bomber_id\n AND 
bc.application_id = bd.application_id\n AND bc.created_at < '%(begin_date)s'\n )\n ) a\n GROUP BY\n 1\n \"\"\" % {'begin_date': begin_date}\n data = run_all_sql(sql)\n\n result = defaultdict(int)\n for d in data:\n result[d[0]] += d[1]\n\n bomber_list = []\n for key, value in result.items():\n bomber_list.append(key)\n (SummaryBomber.update(\n unfollowed_cnt=SummaryBomber.new_case_cnt + value\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == key\n )).execute()\n\n # 剩下bomber_id直接由new_case_cnt赋值\n (SummaryBomber.update(\n unfollowed_cnt=SummaryBomber.new_case_cnt\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id.not_in(bomber_list)\n )).execute()\n\n\n# 未跟进件中当天跟进件数\ndef get_unfollowed_call(begin_date):\n sql = \"\"\"\n SELECT\n bomber_id,\n count(1)\n FROM\n (\n SELECT\n bd.application_id,\n date(bd.entry_at) AS entry_at,\n bd.bomber_id,\n date(bd.out_at) AS out_at\n FROM\n bomber.dispatch_app_history bd\n WHERE\n (\n out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)\n OR out_at IS NULL\n )\n AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)\n AND partner_id IS NULL\n AND NOT EXISTS (\n SELECT\n 1\n FROM\n bomber.call_actions bc\n WHERE\n bd.bomber_id = bc.bomber_id\n AND bc.application_id = bd.application_id\n AND bc.created_at < '%(begin_date)s'\n )\n ) a\n WHERE\n EXISTS (\n SELECT\n 1\n FROM\n bomber.call_actions bc\n WHERE\n a.application_id = bc.application_id\n AND a.bomber_id = bc.bomber_id\n AND bc.created_at > '%(begin_date)s'\n AND bc.created_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n AND bc.created_at >= a.entry_at\n )\n OR EXISTS (\n SELECT\n 1\n FROM\n bomber.application ba\n WHERE\n ba.id = a.application_id\n AND ba.finished_at > '%(begin_date)s'\n AND ba.finished_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n )\n GROUP BY\n 1\n \"\"\" % {'begin_date': begin_date}\n data = run_all_sql(sql)\n\n result = defaultdict(int)\n for d in data:\n result[d[0]] += d[1]\n\n bomber_list = []\n for key, value in result.items():\n bomber_list.append(key)\n (SummaryBomber.update(\n unfollowed_call_cnt=SummaryBomber.new_case_call_cnt + value\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == key\n )).execute()\n\n # 剩下bomber_id直接由new_case_cnt赋值\n update_sql = (SummaryBomber\n .update(unfollowed_call_cnt=SummaryBomber.new_case_call_cnt)\n .where(SummaryBomber.time == begin_date))\n if bomber_list:\n update_sql = update_sql.where(SummaryBomber.bomber_id\n .not_in(bomber_list))\n update_sql.execute()\n return result\n\n\n# summary 更新新的数据(计算summary_bomber的另一部分数据)\n@action(MessageAction.UPDATE_SUMMARY_NEW)\ndef update_summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n\n worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))\n .where(WorkerLog.created_at >= end_date,\n WorkerLog.action == 'UPDATE_SUMMARY_NEW')\n .first())\n if worker_log and worker_log.logs >= 5:\n return\n\n get_new_case_amount(begin_date, end_date)\n get_kp_cleared(begin_date, end_date)\n get_kp_today(begin_date, end_date)\n get_ptp_cnt(begin_date, end_date)\n get_ptp_call_cnt(begin_date, end_date)\n get_new_case_cleared(begin_date, end_date)\n get_new_case_call(begin_date, end_date)\n get_calltime_avg(begin_date, end_date)\n get_no_calltime_avg(begin_date, end_date)\n get_calltime_sum(begin_date, end_date)\n\n\n# -------------------------------- 得到cycle层的数据 --------------------------\ndef 
get_cycle_claimed(begin_date, end_date):\n sql = \"\"\"\n select cycle,count(1)\n from bomber.application where cycle in (1,2,3,4)\n and (finished_at is null or (finished_at>'%s'))\n and created_at>'2018-09-01'\n group by 1\n \"\"\" % begin_date\n result = run_all_sql(sql)\n return result\n\n\n# 得到cycle层的新件件数和金额\n@time_logger\ndef cycle_new_case(begin_date, end_date, real_time_query=False):\n sql = \"\"\"\n SELECT\n 1 AS cycle,\n count( ba.id ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.created_at ) = bo.which_day_overdue \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c1b_entry ) = bo.which_day_overdue\n WHERE\n c1b_entry > '%s' \n AND c1b_entry < '%s' \n UNION\n SELECT\n 3 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c2_entry ) = bo.which_day_overdue \n WHERE\n c2_entry > '%s' \n AND c2_entry < '%s' \n UNION\n SELECT\n 4 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c3_entry ) = bo.which_day_overdue\n WHERE\n c3_entry > '%s' \n AND c3_entry < '%s'\n \"\"\" % (begin_date, end_date, begin_date, end_date,\n begin_date, end_date, begin_date, end_date)\n all_datas = run_all_sql(sql)\n\n if real_time_query:\n return all_datas\n\n for data in all_datas:\n (SummaryBomber.update(\n new_case_amount_sum=data[2],\n new_case_cnt=data[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == data[0],\n SummaryBomber.cycle == data[0]\n )).execute()\n return all_datas\n\n\n# 新件当日维护件数\n@time_logger\ndef get_cycle_new_case_call(begin_date, end_date, real_time_query=False):\n sql = \"\"\"\n SELECT\n 1 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.created_at ) = date( bc.created_at ) \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c1b_entry ) = date( bc.created_at ) \n WHERE\n ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s'\n UNION\n SELECT\n 3 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c2_entry ) = date( bc.created_at ) \n WHERE\n ba.c2_entry > '%s' \n AND ba.c2_entry < '%s'\n UNION\n SELECT\n 4 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c3_entry ) = date( bc.created_at ) \n WHERE\n ba.c3_entry > '%s' \n AND ba.c3_entry < '%s'\n \"\"\" % (begin_date, end_date, begin_date, end_date,\n begin_date, end_date, begin_date, end_date)\n cycle_datas = run_all_sql(sql)\n\n if real_time_query:\n return cycle_datas\n\n for data in cycle_datas:\n (SummaryBomber.update(\n 
new_case_call_cnt=data[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.cycle == data[0],\n SummaryBomber.bomber_id == data[0]\n )).execute()\n return cycle_datas\n\n\ndef get_cycle_new_case_cleared(begin_date, end_date):\n sql = \"\"\"\n SELECT\n '1' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.created_at ) = date( br.repay_at ) \n WHERE ba.created_at > '%s' \n AND ba.created_at < '%s' \n GROUP BY 1, 2 ) a \n UNION\n SELECT \n '2' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n GROUP BY 1, 2) a\n \"\"\" % (begin_date, end_date, begin_date, end_date)\n cycle_cleared = run_all_sql(sql)\n\n for i in cycle_cleared:\n (SummaryBomber.update(\n new_case_cleared_sum=i[2]\n ).where(\n SummaryBomber.cycle == i[0],\n SummaryBomber.bomber_id == i[0],\n SummaryBomber.time == begin_date\n )).execute()\n\n\ndef get_cycle_case_made_cnt(begin_date, end_date):\n sql = \"\"\"\n select cycle,count(distinct application) from (\n select distinct cycle,application from bomber.auto_call_list_record\n where created_at >= '%s'\n and created_at < '%s'\n and called_counts <> 0\n and cycle in (1,2,3,4)\n union\n select distinct cycle,application_id from bomber.call_actions\n where created_at >= '%s'\n and created_at < '%s'\n and cycle in (1,2,3,4)\n ) c\n group by 1\n \"\"\" % (begin_date, end_date, begin_date, end_date)\n case_made_datas = run_all_sql(sql)\n\n for case_made_data in case_made_datas:\n (SummaryBomber.update(\n case_made_cnt=case_made_data[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.cycle == case_made_data[0],\n SummaryBomber.bomber_id == case_made_data[0]\n )).execute()\n\n\n# 得到cycle維度的数据\n@action(MessageAction.SUMMARY_NEW_CYCLE)\ndef summary_new_cycle(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n\n worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))\n .where(WorkerLog.created_at >= end_date,\n WorkerLog.action == 'SUMMARY_NEW_CYCLE')\n .first())\n if worker_log and worker_log.logs >= 5:\n return\n\n cycle_datas = (SummaryBomber\n .select(fn.SUM(SummaryBomber.new_case_amount_sum)\n .alias('new_case_amount_sum'),\n fn.SUM(SummaryBomber.new_case_cleared_sum)\n .alias('new_case_cleared_sum'),\n fn.SUM(SummaryBomber.case_made_cnt)\n .alias('case_made_cnt'),\n fn.SUM(SummaryBomber.case_connect_cnt)\n .alias('case_connect_cnt'),\n fn.SUM(SummaryBomber.promised_cnt)\n .alias('promised_cnt'),\n fn.SUM(SummaryBomber.promised_amount)\n .alias('promised_amount'),\n fn.SUM(SummaryBomber.cleared_cnt)\n .alias('cleared_cnt'),\n fn.SUM(SummaryBomber.cleared_amount)\n .alias('cleared_amount'),\n fn.SUM(SummaryBomber.new_case_cnt)\n .alias('new_case_cnt'),\n fn.SUM(SummaryBomber.new_case_call_cnt)\n .alias('new_case_call_cnt'),\n fn.SUM(SummaryBomber.unfollowed_cnt)\n .alias('unfollowed_cnt'),\n fn.SUM(SummaryBomber.unfollowed_call_cnt)\n .alias('unfollowed_call_cnt'),\n fn.SUM(SummaryBomber.call_cnt).alias('call_cnt'),\n fn.SUM(SummaryBomber.sms_cnt).alias('sms_cnt'),\n 
fn.SUM(SummaryBomber.call_connect_cnt)\n .alias('call_connect_cnt'),\n fn.SUM(SummaryBomber.ptp_today_cnt)\n .alias('ptp_today_cnt'),\n fn.SUM(SummaryBomber.ptp_today_call_cnt)\n .alias('ptp_today_call_cnt'),\n fn.SUM(SummaryBomber.ptp_next_cnt)\n .alias('ptp_next_cnt'),\n fn.SUM(SummaryBomber.ptp_next_call_cnt)\n .alias('ptp_next_call_cnt'),\n fn.SUM(SummaryBomber.KP_cleared_cnt)\n .alias('KP_cleared_cnt'),\n fn.SUM(SummaryBomber.KP_today_cnt)\n .alias('KP_today_cnt'),\n fn.SUM(SummaryBomber.work_ind).alias('work_ind'),\n fn.SUM(SummaryBomber.calltime_sum)\n .alias('calltime_sum'),\n fn.SUM(SummaryBomber.calltime_case_sum)\n .alias('calltime_case_sum'),\n fn.SUM(SummaryBomber.calltime_case_cnt)\n .alias('calltime_case_cnt'),\n fn.SUM(SummaryBomber.calltime_no_case_sum)\n .alias('calltime_no_case_sum'),\n fn.SUM(SummaryBomber.calltime_no_case_cnt)\n .alias('calltime_no_case_cnt'),\n SummaryBomber.cycle.alias('cycle'))\n .where(SummaryBomber.time == begin_date,\n SummaryBomber.cycle << Cycle.values())\n .group_by(SummaryBomber.cycle))\n\n for cycle_data in cycle_datas:\n SummaryBomber.create(\n bomber_id=cycle_data.cycle,\n time=begin_date,\n cycle=cycle_data.cycle,\n new_case_amount_sum=cycle_data.new_case_amount_sum, # 新件金额(同上)\n new_case_cleared_sum=cycle_data.new_case_cleared_sum, # 新件还款(同上)\n new_case_cleard_rate=0,\n case_made_cnt=cycle_data.case_made_cnt, # 拨打件数\n case_made_rate=0,\n case_connect_cnt=cycle_data.case_connect_cnt, # 接通件数\n case_connect_rate=0,\n promised_cnt=cycle_data.promised_cnt, # ptp件数\n promised_amount=cycle_data.promised_amount, # ptp金额\n cleared_cnt=cycle_data.cleared_cnt, # 回款件数\n cleared_amount=cycle_data.cleared_amount, # 回款金额\n new_case_cnt=cycle_data.new_case_cnt, # 新件数量(1,2待算)\n new_case_call_cnt=cycle_data.new_case_call_cnt, # 新件拨打数(同上)\n unfollowed_cnt=cycle_data.unfollowed_cnt,\n unfollowed_call_cnt=cycle_data.unfollowed_call_cnt,\n call_cnt=cycle_data.call_cnt, # 拨打电话数\n sms_cnt=cycle_data.sms_cnt, # 发送短信数\n call_connect_cnt=cycle_data.call_connect_cnt, # 接通电话数\n calltime_case_avg=0, # 接通件均通话时长 (全部待算)\n ptp_today_cnt=cycle_data.ptp_today_cnt, # 当日ptp件数\n ptp_today_call_cnt=cycle_data.ptp_today_call_cnt, # 当日ptp到期维护件数\n ptp_next_cnt=cycle_data.ptp_next_cnt, # 次日ptp到期数\n ptp_next_call_cnt=cycle_data.ptp_next_call_cnt, # 次日到期维护数\n KP_cleared_cnt=cycle_data.KP_cleared_cnt, # kp回款件\n KP_today_cnt=cycle_data.KP_today_cnt, # 当日处于ptp件数\n KP_cleared_rate=0,\n work_ind=cycle_data.work_ind, # 当日是否工作\n calltime_sum=cycle_data.calltime_sum, # 通话总时长\n calltime_case_sum=cycle_data.calltime_case_sum,\n calltime_case_cnt=cycle_data.calltime_case_cnt,\n calltime_no_case_sum=cycle_data.calltime_no_case_sum,\n calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,\n work_time_sum=cycle_data.work_time_sum # 工作时长\n )\n\n cycle_claimed = get_cycle_claimed(begin_date, end_date)\n for claimed in cycle_claimed:\n (SummaryBomber.update(\n claimed_cnt=claimed[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.cycle == claimed[0],\n SummaryBomber.bomber_id == claimed[0]\n )).execute()\n\n # 得到新件件数和金额\n cycle_new_case(begin_date, end_date)\n\n # 得到新件维护件数\n get_cycle_new_case_call(begin_date, end_date)\n\n # 得到新件還款金額\n get_cycle_new_case_cleared(begin_date, end_date)\n\n # 修改cycle的拨打件数(累加对于预测试外呼都是打通的)\n get_cycle_case_made_cnt(begin_date, end_date)\n\n # 得到计算类数据(各比率)\n all_datas = (SummaryBomber.filter(SummaryBomber.time == begin_date))\n for data in all_datas:\n cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum\n if data.new_case_amount_sum else 0) * 
100\n data.new_case_cleard_rate = cl_rat\n\n case_made_rate = (data.case_made_cnt / data.claimed_cnt\n if data.claimed_cnt else 0) * 100\n data.case_made_rate = case_made_rate\n\n case_connect_rate = (data.case_connect_cnt / data.case_made_cnt\n if data.case_made_cnt else 0) * 100\n data.case_connect_rate = case_connect_rate\n\n calltime_case_avg = (data.calltime_case_sum / data.calltime_case_cnt\n if data.calltime_case_cnt else 0)\n data.calltime_case_avg = calltime_case_avg\n\n calltime_no_case_avg = (data.calltime_no_case_sum /\n data.calltime_no_case_cnt\n if data.calltime_no_case_cnt else 0)\n data.calltime_no_case_avg = calltime_no_case_avg\n\n KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt\n if data.KP_today_cnt else 0) * 100\n data.KP_cleared_rate = KP_cleared_rate\n\n data.save()\n\n\n@action(MessageAction.MODIFY_BILL)\ndef modify_bill(payload, msg_id):\n application_id = payload.get('external_id')\n principal_paid = Decimal(payload.get('principal_paid', 0))\n late_fee = Decimal(payload.get('late_fee', 0))\n late_fee_paid = Decimal(payload.get('late_fee_paid', 0))\n overdue_days = payload.get('overdue_days')\n sub_bill_id = payload.get('bill_sub_id')\n partner_bill_id = payload.get('partner_bill_id')\n if not application_id:\n logging.warning('payload has no external_id. {}'.format(str(payload)))\n return\n if not overdue_days:\n logging.info(\"application %s not overdue\" % application_id)\n return\n\n item = (OldLoanApplication\n .get_or_none(OldLoanApplication.application_id ==\n application_id))\n if item:\n start_old_application(item, cancel=True)\n\n overdue_bill = (OverdueBill.select()\n .where(OverdueBill.external_id == application_id,\n OverdueBill.sub_bill_id == sub_bill_id)\n .first())\n application = (Application.filter(Application.id == application_id)\n .first())\n if not overdue_bill:\n if not application:\n logging.info('application %s not in bomber, let it in bomber now',\n application_id)\n send_to_default_q(MessageAction.APPLICATION_BOMBER, {\n 'id': application_id,\n 'bill_sub_id': sub_bill_id\n })\n return\n else:\n application = (Application\n .filter(Application.id == overdue_bill.collection_id)\n .first())\n\n with db.atomic():\n application.status = ApplicationStatus.UNCLAIMED.value\n application.finished_at = None\n application.paid_at = None\n application.save()\n if overdue_bill:\n overdue_bill.status = ApplicationStatus.UNCLAIMED.value\n overdue_bill.finished_at = None\n overdue_bill.save()\n repayment = (RepaymentLog.update(no_active = 1)\n .where(RepaymentLog.application == application.id,\n RepaymentLog.partner_bill_id == partner_bill_id,\n RepaymentLog.overdue_bill_id == overdue_bill.id))\n else:\n repayment = (RepaymentLog.update(no_active=1)\n .where(RepaymentLog.application == application.id,\n RepaymentLog.partner_bill_id == partner_bill_id))\n repayment_num = repayment.execute()\n logging.info(\"modify_bill no active repayment count:%s\" % repayment_num)\n\n if not application.latest_bomber_id:\n return\n\n bomber_id = application.latest_bomber_id\n (DispatchAppHistory.update(\n out_at=None,\n out_overdue_days=overdue_days,\n out_principal_pending=(application.amount - principal_paid),\n out_late_fee_pending=(late_fee - late_fee_paid)\n ).where(\n DispatchAppHistory.application == application.id,\n DispatchAppHistory.bomber_id == bomber_id)).execute()\n\n\n# 获取改变的ids\ndef get_change_bomber():\n cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value, 8: Cycle.C3.value}\n result = {}\n bomber_logs = 
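# --- Illustrative sketch (not part of the original module) --------------------
# The loop at the end of summary_new_cycle above derives the percentage and
# average fields from the summed counters, guarding every division against a
# zero denominator. A compact, pure version of those formulas, keyed by the
# SummaryBomber column names they correspond to (the helper name `derive_rates`
# and the dict-based input are assumptions for illustration only):
def derive_rates(row):
    """row: dict of raw counters -> dict of derived percentages / averages."""
    def ratio(num, den, scale=1):
        return (num / den if den else 0) * scale
    return {
        'new_case_cleard_rate': ratio(row['new_case_cleared_sum'], row['new_case_amount_sum'], 100),
        'case_made_rate':       ratio(row['case_made_cnt'],        row['claimed_cnt'],          100),
        'case_connect_rate':    ratio(row['case_connect_cnt'],     row['case_made_cnt'],        100),
        'calltime_case_avg':    ratio(row['calltime_case_sum'],    row['calltime_case_cnt']),
        'calltime_no_case_avg': ratio(row['calltime_no_case_sum'], row['calltime_no_case_cnt']),
        'KP_cleared_rate':      ratio(row['KP_cleared_cnt'],       row['KP_today_cnt'],         100),
    }
# ------------------------------------------------------------------------------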
(BomberLog.select(BomberLog.bomber_id,\n BomberLog.role_id,\n BomberLog.operation,\n Bomber.group_id)\n .join(Bomber, JOIN_INNER,\n on=BomberLog.bomber_id == Bomber.id)\n .where(fn.DATE(BomberLog.created_at) == date.today(),\n BomberLog.role_id << list(cycle_role_map.keys()),#C1b,c2,c3\n BomberLog.operation << (0, 1), #0删除,1创建,3修改\n Bomber.instalment == 0) #催收单期的员工\n .dicts())\n for b_log in bomber_logs:\n cycle = cycle_role_map.get(b_log[\"role_id\"])\n group_id = b_log[\"group_id\"]\n if cycle in result:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {\n \"cycle\": cycle,\n \"del_ids\": [],\n \"new_ids\": []\n }\n else:\n result[cycle] = {group_id: {\n \"cycle\": cycle,\n \"del_ids\": [],\n \"new_ids\": []}\n }\n if b_log[\"operation\"] == 0:\n result[cycle][group_id][\"del_ids\"].append(b_log[\"bomber_id\"])\n # result 有值表示有人员变动\n if result:\n bombers = (Bomber.select()\n .where(Bomber.role.in_(list(cycle_role_map.keys())),\n Bomber.is_del == 0,\n Bomber.instalment == 0))\n for b in bombers:\n cycle_result = result.get(cycle_role_map[b.role_id], {})\n role_result = cycle_result.get(b.group_id)\n if not role_result:\n continue\n role_result[\"new_ids\"].append(b.id)\n resutl_list = []\n for cycle, group_dict in result.items():\n resutl_list.extend(list(group_dict.values()))\n return resutl_list\n return []\n\n\n# 获取所有的application\ndef get_total_application(cycle, del_ids, new_ids,\n type=ApplicationType.CASH_LOAN.value):\n bomber_list = del_ids + new_ids\n all_apps = (Application.select(Application.id,\n Application.latest_bomber_id.alias(\n \"latest_bomber_id\"),\n Application.promised_date,\n Bomber.partner_id.alias(\"partner_id\"))\n .join(Bomber, JOIN_LEFT_OUTER,\n Application.latest_bomber == Bomber.id)\n .where(Application.cycle == cycle,\n Application.status != ApplicationStatus.REPAID.value,\n Application.latest_bomber_id << bomber_list,\n Application.type == type)\n .order_by(Application.id)\n .dicts())\n return all_apps\n\n\n# 获取平均数列表,即每个bomber的平均件的数量\ndef get_average_number(app_nums, bomber_nums):\n average = app_nums // bomber_nums\n remainder = app_nums % bomber_nums\n average_list = [average for i in range(bomber_nums)]\n if remainder == 0:\n return average_list\n for i in range(remainder):\n average_list[i] += 1\n # 对结果进行一下随机,不然每次都是前几个人多件\n random.shuffle(average_list)\n return average_list\n\n\n# 对appliciton进行分类统计\ndef classified_statistic_apps(apps):\n result = {}\n # 根据用户的bomber_id 对数据进行分类统计\n for app in apps:\n # 将用户下p和没下p的件分开\n latest_bomber_id = app[\"latest_bomber_id\"]\n if latest_bomber_id not in result:\n result[latest_bomber_id] = {\n \"bid\":latest_bomber_id,\n \"p_list\": [],\n \"np_list\": [],\n \"partner_id\": app[\"partner_id\"] if app[\"partner_id\"] else \"\",\n }\n promised_date = app.get(\"promised_date\")\n if not promised_date or promised_date.date() < date.today():\n result[latest_bomber_id]['np_list'].append(app[\"id\"])\n else:\n result[latest_bomber_id]['p_list'].append(app[\"id\"])\n return result\n\n\n# 获取多余的件,并且计算每个人所需要的件\ndef get_surplus_application(new_ids, del_ids, average_nums, classified_apps):\n surplus_apps = []\n # 如果id在删除队列中,将对应id所有的件重新分配\n for del_id in del_ids:\n del_res = classified_apps.get(del_id,{})\n p_list = del_res.get(\"p_list\", [])\n np_list = del_res.get(\"np_list\", [])\n del_res[\"need_num\"] = -(len(p_list) + len(np_list))\n del_res[\"to_list\"] = np_list + p_list\n surplus_apps.extend(p_list)\n surplus_apps.extend(np_list)\n # 计算每个用户的下p和没下p的件的个数,和自己需要的件的个数\n for index, bid in enumerate(new_ids):\n 
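# --- Illustrative usage (not part of the original module) ---------------------
# get_average_number above splits `app_nums` applications across `bomber_nums`
# collectors as evenly as possible: everyone gets the integer average, the
# remainder is handed out one extra case at a time, and the list is shuffled so
# the extra cases do not always land on the first collectors. A quick check of
# those properties (assumes this module is importable):
#
#     parts = get_average_number(10, 3)
#     assert sorted(parts, reverse=True) == [4, 3, 3]
#     assert sum(parts) == 10
#     get_average_number(9, 3)   # -> [3, 3, 3]; no remainder, returned unshuffled
# ------------------------------------------------------------------------------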
average = average_nums[index]\n bomber_app = classified_apps.get(bid)\n if not bomber_app:\n # 获取partner_id\n bomber = (Bomber.select(Bomber.partner_id)\n .where(Bomber.id == bid)\n .first())\n bomber_app = {\n \"bid\": bid,\n \"p_list\": [],\n \"p_num\": 0,\n \"np_list\": [],\n \"np_num\": 0,\n \"need_num\": average,\n \"partner_id\": bomber.partner_id if bomber else ''\n }\n classified_apps[bid] = bomber_app\n else:\n p_num = len(bomber_app[\"p_list\"])\n np_num = len(bomber_app[\"np_list\"])\n # 如果下p件大于平均值,直接将他剩余所有件都放入到多余列表中\n if p_num > average:\n bomber_app[\"need_num\"] = - np_num\n else:\n bomber_app[\"need_num\"] = average - (p_num + np_num)\n bomber_app[\"p_num\"] = p_num\n bomber_app[\"np_num\"] = np_num\n # 将多余的件放入到多余列表中\n if bomber_app[\"need_num\"] < 0:\n # 将件随机,确保分件的逾期天数尽量均匀\n random.shuffle(bomber_app[\"np_list\"])\n res_over = bomber_app[\"np_list\"][:-bomber_app[\"need_num\"]]\n bomber_app[\"to_list\"] = res_over\n surplus_apps.extend(res_over)\n # 按照need_num进行排序\n classified_apps_list = sorted(classified_apps.values(),\n key=lambda x:x[\"need_num\"],\n reverse=True)\n return surplus_apps, classified_apps_list\n\n\n# 更新数据库数据,进行分件\ndef update_applications(surplus_apps, classified_apps, cycle):\n # 多余得件进行随机\n random.shuffle(surplus_apps)\n for app in classified_apps:\n status = 0\n try:\n if app[\"need_num\"] > 0:\n from_list = surplus_apps[:app[\"need_num\"]]\n # 移除surplus_apps中的元素\n for i in from_list: surplus_apps.remove(i)\n app[\"from_list\"] = from_list\n with db.atomic():\n q = Application.update(\n {Application.latest_bomber_id: app[\"bid\"]}).where(\n Application.id.in_(from_list))\n q.execute()\n # 分件入案\n in_record_params = {\n \"dest_bomber_id\": app[\"bid\"],\n \"application_ids\": from_list,\n \"dest_partner_id\": app[\"partner_id\"],\n \"cycle\": cycle,\n }\n new_in_record(**in_record_params)\n status = 1\n elif app[\"need_num\"] < 0:\n #分件出案\n out_record_params = {\n \"src_bomber_id\": app[\"bid\"],\n \"application_ids\": app[\"to_list\"]\n }\n new_out_record(**out_record_params)\n status = 1\n else:\n status = 1\n except Exception as e:\n logging.error(\"分件异常,params:%s,error:%s\"%(app,str(e)))\n #记录操作日志\n log_params = {\n \"bomber_id\": app[\"bid\"],\n \"form_ids\": json.dumps(app.get(\"from_list\", [])),\n \"to_ids\": json.dumps(app.get(\"to_list\", [])),\n \"need_num\": app.get(\"need_num\"),\n \"np_ids\": json.dumps(app.get(\"np_list\", [])),\n \"p_ids\": json.dumps(app.get(\"p_list\", [])),\n \"status\": status\n }\n DispatchAppLogs.create(**log_params)\n return classified_apps\n\n\n# 人员变动分配分期的催收单\ndef get_instalment_change_bomber():\n result ={}\n bomber_logs = (BomberLog.select(BomberLog.bomber_id,\n BomberLog.operation,\n Bomber.instalment,\n Bomber.group_id)\n .join(Bomber, JOIN_INNER,\n on=BomberLog.bomber_id == Bomber.id)\n .where(fn.DATE(BomberLog.created_at) == date.today(),\n BomberLog.operation << [0,1],\n Bomber.instalment > 0)\n .dicts())\n for bl in bomber_logs:\n cycle = bl[\"instalment\"]\n group_id = bl[\"group_id\"]\n if cycle not in result:\n result[cycle] = {group_id: {\n \"cycle\": cycle,\n \"del_ids\": [],\n \"new_ids\": []\n }}\n else:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {\n \"cycle\": cycle,\n \"del_ids\": [],\n \"new_ids\": []}\n if bl[\"operation\"] == 0:\n result[cycle][group_id][\"del_ids\"].append(bl[\"bomber_id\"])\n if result:\n instalments = list(result.keys())\n bombers = (Bomber.select()\n .where(Bomber.instalment << instalments,\n Bomber.is_del == 0))\n for b in bombers:\n cycle_result = 
result.get(b.instalment, {})\n group_result = cycle_result.get(b.group_id)\n if not group_result:\n continue\n group_result[\"new_ids\"].append(b.id)\n result_list = []\n for cycle,group_dict in result.items():\n result_list.extend(list(group_dict.values()))\n return result_list\n return []\n\ndef instalment_update_applications(surplus_apps, classified_apps, cycle):\n end = 0\n for app in classified_apps:\n if app[\"need_num\"] <= 0:\n continue\n start = end\n end = start + app[\"need_num\"]\n aids = surplus_apps[start:end]\n app[\"from_list\"] = aids\n status = 0\n with db.atomic():\n q = (Application.update(last_bomber = Application.latest_bomber,\n latest_bomber = app[\"bid\"],\n ptp_bomber = None)\n .where(Application.id << aids)\n .execute())\n # 入案和出案\n record_param = {\n \"cycle\": cycle,\n \"application_ids\": aids,\n \"dest_bomber_id\": app[\"bid\"],\n \"dest_partner_id\": app[\"partner_id\"],\n }\n out_and_in_record_instalment(**record_param)\n status = 1\n # 记录操作日志\n log_params = {\n \"bomber_id\": app[\"bid\"],\n \"form_ids\": json.dumps(app.get(\"from_list\", [])),\n \"to_ids\": json.dumps(app.get(\"to_list\", [])),\n \"need_num\": app.get(\"need_num\"),\n \"np_ids\": json.dumps(app.get(\"np_list\", [])),\n \"p_ids\": json.dumps(app.get(\"p_list\", [])),\n \"status\": status\n }\n DispatchAppLogs.create(**log_params)\n return classified_apps\n\n# 执行人员变动分件\ndef change_bomber_dispatch_apps(change_bombers,\n type=ApplicationType.CASH_LOAN.value):\n if not change_bombers:\n return\n for bombers in change_bombers:\n del_ids = bombers.get(\"del_ids\", [])\n new_ids = bombers.get(\"new_ids\", [])\n cycle = bombers.get(\"cycle\")\n if not all([new_ids, cycle]):\n logging.info(\n \"获取需要分件的信息异常,bomber:%s,type:%s\" % (bombers, type))\n continue\n # 获取总apps\n apps = get_total_application(cycle, del_ids, new_ids, type)\n if not apps:\n logging.info(\n \"分件没有获取到对应的件,bomber:%s,type:%s\" % (bombers, type))\n continue\n # 获取平均数列表\n average_nums = get_average_number(len(apps), len(new_ids))\n # 分类统计apps\n classified_apps = classified_statistic_apps(apps)\n # 计算每个人需要分的件和多余的件\n superlus_apps, classified_apps = get_surplus_application(new_ids,\n del_ids,\n average_nums,\n classified_apps)\n # 分件,更新数据库\n if type == ApplicationType.CASH_LOAN.value:\n result = update_applications(superlus_apps, classified_apps, cycle)\n elif type == ApplicationType.CASH_LOAN_STAGING.value:\n result = instalment_update_applications(superlus_apps,\n classified_apps,\n cycle)\n else:\n logging.info(\"人员变动触发分件,unknown type:%s\" % type)\n\n logging.info(\"人员变动触发的分件:result:%s,type:%s\" % (result, type))\n\n\n#bomber人员变动,进行分件\n@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)\ndef bomber_dispatch_applications(payload, msg_id):\n #通过当天的登录日志,判断人员变动,若删除bomber_log会记录\n change_bombers = get_change_bomber()\n instalment_change_bombers = get_instalment_change_bomber()\n params = {ApplicationType.CASH_LOAN.value: change_bombers,\n ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}\n for type,bombers in params.items():\n change_bomber_dispatch_apps(change_bombers=bombers,type=type)\n\n\n@action(MessageAction.REPAIR_BOMBER)\ndef repair_bomber(payload, msg_id):\n app_mobile = payload['app_mobile']\n username = payload.get('user_name')\n logging.info('start repair bomber, number: %s' % app_mobile)\n\n # 得到用户填写的EC,确认该EC号码是否在催收中,并存储关系\n if 'mobile_no' in payload and payload['mobile_no']:\n mobile = number_strip(str(payload['mobile_no']))[:64]\n name = payload.get('mobile_name')\n application = 
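# --- Illustrative note (not part of the original module) ----------------------
# instalment_update_applications above hands out the surplus instalment cases by
# slicing one shared list with a running offset: each bomber whose need_num is
# positive takes the next need_num ids, and bombers with need_num <= 0 are
# skipped. With hypothetical need_nums of [3, 2] and five surplus ids, the first
# bomber receives surplus_apps[0:3] and the second surplus_apps[3:5]; because
# classified_apps is sorted by need_num in descending order beforehand, the
# bombers with the largest shortfall are served first.
# ------------------------------------------------------------------------------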
Application.filter(Application.user_mobile_no == mobile)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, mobile, username, name)\n\n if 'tel_no' in payload and payload['tel_no']:\n tel_no = number_strip(str(payload['tel_no']))[:64]\n name = payload.get('tel_name')\n application = Application.filter(Application.user_mobile_no == tel_no)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, tel_no, username, name)\n\n\ndef repair_contact(number, application, name):\n # 填写的ec有过逾期则将号码加入contact中\n application = application.first()\n contact = (Contact\n .filter(Contact.user_id == application.user_id,\n Contact.number == number))\n if not contact.exists():\n Contact.create(\n user_id=application.user_id,\n name=name,\n number=number,\n relationship=Relationship.FAMILY.value,\n source='repair ec',\n real_relationship=Relationship.FAMILY.value\n )\n logging.info('add repair contact success, number: %s' % number)\n\n\ndef add_relationship(number, ec_number, username, name):\n # 存储关系\n query = (TotalContact\n .objects(src_number=str(number),\n dest_number=ec_number,\n source=20,\n is_calc=False\n )\n .first())\n if not query:\n TotalContact(\n src_number=str(number),\n src_name=username,\n dest_number=ec_number,\n dest_name=name,\n source=20).save()\n logging.info('add relationship success, number: %s' % number)\n\n\n# 获取要统计的时间范围\ndef get_summary_daily_time():\n mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')\n mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')\n now_date = datetime.now()\n now_date_time = now_date.time()\n today_str = str(now_date.date())\n if now_date_time < mid_time_t1.time():\n yes_date = now_date - timedelta(days=1)\n yes_date_str = str(yes_date.date())\n begin_str = yes_date_str + ' 17:20:00'\n end_str = today_str + ' 00:00:00'\n elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():\n begin_str = today_str + ' 00:00:00'\n end_str = today_str + ' 12:40:00'\n else:\n begin_str = today_str + ' 12:40:00'\n end_str = today_str + ' 17:20:00'\n begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')\n end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')\n # 记录统计的是哪天的数据\n summary_datetime = now_date-timedelta(minutes=30)\n summary_date = summary_datetime.date()\n return begin_time, end_time, summary_date\n\n# 每天12:40 和 17:20 和 凌晨 更新当天数据\n@action(MessageAction.SUMMARY_DAILY)\ndef summary_daily_data(payload, msg_id):\n begin_time, end_time, summary_date = get_summary_daily_time()\n call_actions = (CallActionsR.select(CallActionsR.id,\n CallActionsR.bomber_id,\n CallActionsR.application_id,\n CallActionsR.promised_date,\n CallActionsR.cycle,\n CallActionsR.name,\n CallActionsR.number)\n .where(CallActionsR.created_at >= begin_time,\n CallActionsR.created_at < end_time,\n CallActionsR.type << (0,1)))\n summary_dailys = {}\n for call in call_actions:\n if call.bomber_id not in summary_dailys:\n summary_dailys[call.bomber_id] = {'ptp_cnt': 0,\n 'call_cnt': 0,\n 'cycle': call.cycle,\n 'repayment': 0,\n 'bomber_id': call.bomber_id,\n 'summary_date':str(summary_date)}\n\n # C2,C3的下p的件会多一条没有number和name的数据\n if call.name and call.number:\n summary_dailys[call.bomber_id]['call_cnt'] += 1\n\n if call.promised_date:\n summary_dailys[call.bomber_id]['ptp_cnt'] += 1\n\n # 获取回款信息\n C1_sql = \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,a.cycle\n from \n (select 
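# --- Illustrative note (not part of the original module) ----------------------
# get_summary_daily_time above picks one of three statistics windows depending
# on when the SUMMARY_DAILY job fires:
#
#   run before 12:40          -> window = yesterday 17:20:00 .. today 00:00:00
#   run in [12:40, 17:20)     -> window = today 00:00:00 .. today 12:40:00
#   run at or after 17:20     -> window = today 12:40:00 .. today 17:20:00
#
# and attributes the data to (now - 30 minutes).date(), so a run shortly after
# midnight still books the late-evening window against the previous day. For
# example (dates made up for illustration), a run at 2023-01-02 00:10 summarizes
# 2023-01-01 17:20:00 .. 2023-01-02 00:00:00 under summary_date 2023-01-01.
# ------------------------------------------------------------------------------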
a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,2,4,5)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\" % (begin_time, end_time)\n C1_repayment = run_all_sql(C1_sql)\n other_sql = \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,cycle\n from (\n select application_id,current_bomber_id,pay_amount,repay_at,cycle\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at,\n br.cycle\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (3,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n group by 1,4) b\n group by 1\n \"\"\" % (begin_time, end_time)\n other_repayment = run_all_sql(other_sql)\n all_repayment = C1_repayment + other_repayment\n for res in all_repayment:\n bomber_id,pay_amount,cycle = res\n if bomber_id in summary_dailys:\n summary_dailys[bomber_id]['repayment'] += pay_amount\n else:\n summary_dailys[bomber_id] = {'ptp_cnt': 0,\n 'call_cnt': 0,\n 'cycle': cycle,\n 'repayment': pay_amount,\n 'bomber_id': bomber_id,\n 'summary_date': str(summary_date)\n }\n insert_values = list(summary_dailys.values())\n if insert_values:\n SummaryDaily.insert_many(insert_values).execute()\n\n# 获取本cycle所有没完成的件\ndef get_cycle_all_no_paid_app(cycle, type=None):\n apps = (Application\n .select(Application.id,\n Application.latest_bomber_id,\n Application.ptp_bomber,\n Application.promised_date,\n Application.cycle)\n .where(Application.cycle == cycle,\n Application.status != ApplicationStatus.REPAID.value,\n Application.type == type)\n .dicts())\n\n dis_app_ids = [a['id'] for a in apps]\n # 将dispatch_app中的件状态更新\n with db.atomic():\n for idx in range(0, len(dis_app_ids), 1000):\n ids = dis_app_ids[idx:idx + 1000]\n q = (DispatchApp.update(status = DisAppStatus.ABNORMAL.value)\n .where(DispatchApp.application << ids)\n .execute())\n return apps\n\n# 根据bomber_id整理app\ndef get_app_logs(apps):\n app_logs = {}\n all_np_apps = []\n all_p_apps = []\n for a in apps:\n latest_bomber = a[\"latest_bomber\"]\n # 2 代替催收单中latest_bomber是空的情况,\n latest_bomber = a[\"cycle\"] if not latest_bomber else latest_bomber\n if latest_bomber in app_logs:\n app_logs[latest_bomber][\"to_ids\"].append(a[\"id\"])\n else:\n app_logs[latest_bomber] = {\"bomber_id\": latest_bomber,\n \"to_ids\": [a[\"id\"]],\n \"np_ids\": [],\n \"p_ids\": []}\n if (a[\"promised_date\"] and\n a[\"promised_date\"].date() >= datetime.now().date()):\n app_logs[latest_bomber][\"p_ids\"].append(a[\"id\"])\n all_p_apps.append(a)\n else:\n app_logs[latest_bomber][\"np_ids\"].append(a[\"id\"])\n all_np_apps.append(a)\n return app_logs, all_np_apps, all_p_apps\n\n# 月底分件给外包员工\ndef month_dispatch_app_out_partner(cycle,apps,app_logs,np_apps):\n # 件随机\n apps = list(apps)\n np_apps = list(np_apps)\n random.shuffle(np_apps)\n apps_len = len(apps)\n np_apps_len = len(np_apps)\n end = 0\n all_app_precentage = 0\n # 
获取这个cycle所有的的外包\n partners = (Partner.select()\n .where(Partner.cycle == cycle,\n Partner.status == PartnerStatus.NORMAL.value))\n for p in partners:\n all_app_precentage += p.app_percentage\n\n for partner in partners:\n # 获取外包人员\n bombers = (Bomber.select()\n .where(Bomber.partner == partner.id,\n Bomber.is_del == 0,\n Bomber.status != BomberStatus.OUTER_LEADER.value))\n bids = {b.id:b for b in bombers}\n if len(bids) == 0:\n logging.info(\"cycle:%s,partner:%s,no bomber\"%(cycle, partner.id))\n continue\n start = end\n if np_apps_len >= int(apps_len * all_app_precentage):\n end = start + int(apps_len * partner.app_percentage)\n else:\n end = (start +\n int(np_apps_len * partner.app_percentage / all_app_precentage))\n # 外包团队应该获分到的所有件\n partner_app = np_apps[start:end]\n dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)\n # 剩余给内部员工的件\n np_apps = np_apps[end:]\n return np_apps\n\n\n# 内部员工分\ndef month_dispatch_app_inner(cycle,np_apps,app_logs,p_apps):\n sys_cycle = {1: 'AB_TEST_C1A',\n 2: 'AB_TEST_C1B',\n 3: 'AB_TEST_C2',\n 4: 'AB_TEST_C3'}\n # 获取内容部员工\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = (Bomber.select().where(Bomber.id << sys_values,\n Bomber.is_del == 0))\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n bombers = bombers.where(Bomber.instalment == 0)\n bids = {b.id:b for b in bombers}\n # c1b没有下p的件要进自动外呼\n if cycle == Cycle.C1A.value:\n np_ids = [a[\"id\"] for a in np_apps]\n # 更新没有下p的件\n np = (Application\n .update(status = ApplicationStatus.PROCESSING.value,\n ptp_bomber = None,\n latest_bomber = None)\n .where(Application.id << np_ids)\n .execute())\n bomber_app_logs = app_logs.get(cycle, {})\n # 月底分件的时候,进自动外呼的件也要有入案和出案记录\n out_param = {\n \"application_ids\": bomber_app_logs.get(\"to_ids\", []),\n \"month_dispatch\": 1,\n \"src_bomber_id\": cycle,\n }\n new_out_record(**out_param)\n in_param = {\n \"cycle\": cycle,\n \"application_ids\": np_ids,\n \"dest_bomber_id\": cycle\n }\n new_in_record(**in_param)\n bomber_app_logs[\"need_num\"] = len(np_apps)\n bomber_app_logs[\"form_ids\"] = np_ids\n bomber_app_logs[\"status\"] = 1\n else:\n dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)\n\n dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)\n\n# 把件分给bomber\ndef dispatch_apps_to_bomber(cycle,apps,bids,app_logs,out_partner=True,\n type=ApplicationType.CASH_LOAN.value):\n apps = list(apps)\n random.shuffle(apps)\n # 获取每个人应该分个数\n bids_list = list(bids.keys())\n if len(bids_list) <= 0:\n logging.info(\"get_dispatch_app_to_bomber no bids\")\n return\n average_num = get_average_number(len(apps), len(bids_list))\n bomber_end = 0\n with db.atomic():\n for index, bid in enumerate(bids_list):\n current_bomber = bids.get(bid)\n bomber_app_logs = app_logs.get(bid, {})\n bomber_start = bomber_end\n bomber_end = bomber_start + average_num[index]\n bomber_apps = apps[bomber_start:bomber_end]\n from_p, from_np, from_ids,status = [], [], [], 0\n # 区分员工分到的件,哪些是下p的哪些是没下p的\n for ba in bomber_apps:\n promised_date = ba.get(\"promised_date\")\n from_ids.append(ba[\"id\"])\n if promised_date and promised_date.date() >= date.today():\n from_p.append(ba[\"id\"])\n else:\n from_np.append(ba[\"id\"])\n app_status = ApplicationStatus.AB_TEST.value\n # c1A内部下p的件要特殊状态\n if (cycle == Cycle.C1A.value and not out_partner\n and type == ApplicationType.CASH_LOAN.value):\n app_status = ApplicationStatus.PROCESSING.value\n if from_p:\n p = (Application\n .update(ptp_bomber=bid,\n 
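# --- Illustrative note (not part of the original module) ----------------------
# The partner loop in month_dispatch_app_out_partner above sizes each outsourcing
# partner's slice of the un-promised cases from its app_percentage. With
# hypothetical numbers: 1000 open cases, 800 of them without an active PTP, and
# two partners at 30% and 20% (all_app_precentage = 0.5, threshold
# int(1000 * 0.5) = 500):
#
#   800 >= 500, so partner A takes np_apps[0:300] (int(1000 * 0.3)) and
#   partner B takes np_apps[300:500]; np_apps[500:] stay with in-house staff.
#
# If only 400 un-promised cases existed (below the threshold), the slices shrink
# proportionally: int(400 * 0.3 / 0.5) = 240 and int(400 * 0.2 / 0.5) = 160,
# leaving nothing for the in-house pool.
# ------------------------------------------------------------------------------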
latest_bomber=bid,\n status=app_status)\n .where(Application.id << from_p)\n .execute())\n p_ids = bomber_app_logs.get(\"p_ids\", []) + from_p\n bomber_app_logs[\"p_ids\"] = p_ids\n if from_np:\n np = (Application\n .update(latest_bomber=bid,\n ptp_bomber=None,\n status=ApplicationStatus.AB_TEST.value)\n .where(Application.id << from_np)\n .execute())\n np_ids = bomber_app_logs.get(\"np_ids\", []) + from_np\n bomber_app_logs[\"np_ids\"] = np_ids\n in_param = {\"cycle\": cycle,\n \"dest_partner_id\": current_bomber.partner_id,\n \"application_ids\": from_ids,\n \"dest_bomber_id\": bid,\n }\n if type == ApplicationType.CASH_LOAN.value:\n out_param = {\"src_bomber_id\": bid,\n \"application_ids\": bomber_app_logs.get(\"to_ids\",[]),\n \"month_dispatch\":1\n }\n # 出案\n new_out_record(**out_param)\n # 入案\n new_in_record(**in_param)\n else:\n out_and_in_record_instalment(**in_param)\n bomber_app_logs[\"status\"] = 1\n need_num = bomber_app_logs.get(\"need_num\", 0) + average_num[index]\n bomber_app_logs[\"need_num\"] = need_num\n all_form_ids = bomber_app_logs.get(\"form_ids\", []) + from_ids\n bomber_app_logs[\"form_ids\"] = all_form_ids\n # 如果是内部的分件,不用执行下面的操作\n if not out_partner:\n continue\n # 分给外包的件,要记录在dispatch_app中.将原来的记录删除,在插入新的数据\n try:\n (DispatchApp.delete()\n .where(DispatchApp.application.in_(from_ids))\n .execute())\n dispatch_ins = [{\"application\": id,\n \"partner\": current_bomber.partner_id,\n \"bomber\": bid,\n \"status\": DisAppStatus.NORMAL.value,\n } for id in from_ids]\n (DispatchApp.insert_many(dispatch_ins).execute())\n except Exception as e:\n logging.info(\n \"month_disapp_error error:%s,bid:%s,from_ids:%s\" %\n (str(e), bid, from_ids))\n\n\n# 计算每个件的逾期天数,根据逾期天数更新对应的cycle\ndef calc_instalment_apps_cycle():\n cycle_list = [Cycle.C2.value, Cycle.C3.value]\n for cycle in cycle_list:\n apps = (ApplicationR.select(ApplicationR.id,\n ApplicationR.cycle,\n ApplicationR.overdue_days.alias(\"ods\"),\n ApplicationR.latest_bomber,\n OverdueBillR.status,\n OverdueBillR.overdue_days.alias(\"oods\"))\n .join(OverdueBillR, JOIN_LEFT_OUTER,\n on=ApplicationR.id == OverdueBillR.collection_id)\n .where(ApplicationR.cycle == cycle,\n ApplicationR.type ==\n ApplicationType.CASH_LOAN_STAGING.value,\n ApplicationR.status != ApplicationStatus.REPAID.value)\n .dicts())\n # 计算催收单真实的overdue_days\n lower_apps = {}\n for app in apps:\n if app[\"status\"] == ApplicationStatus.REPAID.value:\n continue\n aid = app[\"id\"]\n if aid in lower_apps:\n lower_apps[aid][\"ods\"] = max(app[\"oods\"], app[\"ods\"])\n else:\n lower_apps[aid] = {\n \"id\": aid,\n \"cycle\": cycle,\n \"ods\": app[\"oods\"],\n }\n # 计算apps的逾期天数和当前cycle是否匹配\n for aid,app in lower_apps.items():\n new_cycle = get_cycle_by_overdue_days(app[\"ods\"])\n if new_cycle != cycle:\n update_param = {\"cycle\":new_cycle,\n \"overdue_days\":app[\"ods\"]}\n entry_time = calc_entry_time(app[\"ods\"])\n update_param.update(entry_time)\n # 更新催收单\n (Application.update(**update_param)\n .where(Application.id == aid)\n .execute())\n\n\n# 降cycle之后根据逾期天数更新以下几个时间\ndef calc_entry_time(overdue_days):\n app_entry_time = {}\n overdue_entry = {\n \"dpd1_entry\": [1, 3],\n \"C1A_entry\": [4, 10],\n \"C1B_entry\": [11, 30],\n \"C2_entry\": [31, 60],\n \"C3_entry\": [61, 90]\n }\n for key,value in overdue_entry.items():\n if value[0] <= overdue_days <= value[1]:\n app_entry_time[key] = datetime.now()\n else:\n app_entry_time[key] = None\n return app_entry_time\n\n# 分期分件\ndef instalment_month_dispatch_app():\n sys_cycle = {1: 'AB_TEST_C1A',\n 2: 
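# --- Illustrative usage (not part of the original module) ---------------------
# calc_entry_time above maps the recomputed overdue_days of a down-cycled
# instalment case onto exactly one entry-time column and resets the rest to
# None. For example, overdue_days == 45 falls in the C2 band (31..60 days):
#
#     calc_entry_time(45)
#     # -> {'dpd1_entry': None, 'C1A_entry': None, 'C1B_entry': None,
#     #     'C2_entry': datetime.now(), 'C3_entry': None}
#
# A value outside every band (e.g. 95) leaves all five columns as None.
# ------------------------------------------------------------------------------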
'AB_TEST_C1B',\n 3: 'AB_TEST_C2',\n 4: 'AB_TEST_C3'}\n # 降cycle\n calc_instalment_apps_cycle()\n instalment_cycle_list = Cycle.values()[:4]\n for cycle in instalment_cycle_list:\n apps = get_cycle_all_no_paid_app(cycle,\n ApplicationType.CASH_LOAN_STAGING.value)\n if not apps:\n logging.info(\"instalment_month_dispatch no get apps,cycle:%s\"%cycle)\n continue\n app_logs, all_np_apps, all_p_apps = get_app_logs(apps)\n # 获取要分件的成员\n if cycle == Cycle.C1A.value:\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = (Bomber.select().where(Bomber.id << sys_values,\n Bomber.is_del == 0))\n else:\n bombers = (Bomber.select().where(Bomber.is_del == 0,\n Bomber.instalment == cycle))\n bids = {b.id:b for b in bombers}\n if not bids:\n logging.info(\"instalment_month_dispatch no bomber,cycle:%s\"%cycle)\n continue\n dispatch_apps_to_bomber(cycle = cycle,\n apps = all_p_apps,\n bids = bids,\n app_logs = app_logs,\n out_partner = False,\n type = ApplicationType.CASH_LOAN_STAGING.value)\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n dispatch_apps_to_bomber(cycle=cycle,\n apps=all_np_apps,\n bids=bids,\n app_logs=app_logs,\n out_partner=False,\n type=ApplicationType.CASH_LOAN_STAGING.value)\n else:\n # 未下p的件要有入案记录\n np_ids = [a[\"id\"] for a in all_np_apps]\n np = (Application.update(status=ApplicationStatus.UNCLAIMED.value,\n ptp_bomber=None,\n latest_bomber=None)\n .where(Application.id << np_ids,\n ApplicationStatus != ApplicationStatus.REPAID.value)\n .execute())\n in_param = {\n \"cycle\": cycle,\n \"application_ids\": np_ids,\n \"dest_bomber_id\": cycle\n }\n out_and_in_record_instalment(**in_param)\n\n # 如果有降cycle的件,也记录在历史记录中\n try:\n dispatch_apps_logs = []\n for bid,app in app_logs.items():\n alg = {\n \"bomber_id\": bid,\n \"need_num\": -len(app.get(\"to_ids\", [])),\n \"form_ids\": json.dumps(app.get(\"form_ids\", [])),\n \"to_ids\": json.dumps(app.get(\"to_ids\", [])),\n \"np_ids\": json.dumps(app.get(\"np_ids\", [])),\n \"p_ids\": json.dumps(app.get(\"p_ids\", [])),\n \"status\": 1\n }\n if bid in bids:\n alg[\"need_num\"] = app.get(\"need_num\", 0)\n dispatch_apps_logs.append(alg)\n if dispatch_apps_logs:\n DispatchAppLogs.insert_many(dispatch_apps_logs).execute()\n except Exception as e:\n logging.info(\n \"instalment_dispatch_app_month log error.cycle:%s,error:%s\" % (\n cycle, str(e)))\n\n\n# 每个月月底进行所有件重新分配\n@action(MessageAction.MONTH_DISPATCH_APP)\ndef month_dispatch_app(payload, msg_id):\n # 判断几天的日期是不是1号\n if datetime.today().day != 1:\n logging.info(\"今天不是1号,不能执行分期件\")\n return\n cycle_list = [Cycle.C1A.value,\n Cycle.C1B.value,\n Cycle.C2.value,\n Cycle.C3.value]\n with db.atomic():\n for cycle in cycle_list:\n apps = get_cycle_all_no_paid_app(cycle,\n ApplicationType.CASH_LOAN.value)\n if not apps:\n logging.info(\"month_dispatch_app not get apps.cycle:%s\"%cycle)\n continue\n app_logs, all_np_apps, all_p_apps = get_app_logs(apps)\n np_apps = month_dispatch_app_out_partner(cycle=cycle,\n apps=apps,\n app_logs=app_logs,\n np_apps = all_np_apps)\n if not np_apps and not all_p_apps:\n logging.info(\"month_dispatch_app not get inner apps.cycle:%s\",\n cycle)\n continue\n month_dispatch_app_inner(cycle,np_apps,app_logs,all_p_apps)\n # 分件日志记录在表中\n try:\n dispatch_apps_logs = []\n for bid,app in app_logs.items():\n alg = {\n \"bomber_id\": bid,\n \"need_num\": app.get(\"need_num\",0),\n \"form_ids\": json.dumps(app.get(\"form_ids\", [])),\n \"to_ids\": json.dumps(app.get(\"to_ids\", [])),\n 
\"np_ids\": json.dumps(app.get(\"np_ids\", [])),\n \"p_ids\": json.dumps(app.get(\"p_ids\", [])),\n \"status\": 1\n }\n dispatch_apps_logs.append(alg)\n for idx in range(0, len(dispatch_apps_logs), 10):\n DispatchAppLogs.insert_many(\n dispatch_apps_logs[idx:idx + 10]).execute()\n except Exception as e:\n logging.error(\n \"insert dispatch_log error:%s,cycle:%s\"%(str(e),cycle))\n try:\n instalment_month_dispatch_app()\n except Exception as e:\n logging.info(\"instalment_month_dispatch_error:%s\"%str(e))\n\n\n# 每天定时统计催收单信息\n@action(MessageAction.SUMMARY_BOMBER_OVERDUE)\ndef summary_bomber_overdue_everyday(payload, msg_id):\n cycle_list = Cycle.values()\n which_day = date.today()\n # 获取每个cycle没有完成的订单\n for cycle in cycle_list:\n apps = (ApplicationR.select(ApplicationR.id,\n ApplicationR.cycle,\n ApplicationR.ptp_bomber,\n ApplicationR.overdue_days,\n ApplicationR.promised_date,\n ApplicationR.follow_up_date,\n ApplicationR.external_id,\n OverdueBillR.status,\n OverdueBillR.periods,\n OverdueBillR.sub_bill_id)\n .join(OverdueBillR, JOIN_LEFT_OUTER,\n on = ApplicationR.id == OverdueBillR.collection_id)\n .where(ApplicationR.status != ApplicationStatus.REPAID.value,\n ApplicationR.no_active == 0,\n ApplicationR.cycle == cycle)\n .dicts())\n\n bomber_overdue_list = []\n for app in apps:\n status = app.get(\"status\")\n if status == ApplicationStatus.REPAID.value:\n continue\n ptp_bomber = app.get(\"ptp_bomber\")\n promised_date = app.get(\"promised_date\")\n follow_up_date = app.get(\"follow_up_date\")\n if not promised_date or promised_date.date() < date.today():\n ptp_bomber = promised_date = None\n if not follow_up_date or follow_up_date.date() < date.today():\n follow_up_date = None\n overdue_dict = {\n \"collection_id\": app.get(\"id\"),\n \"external_id\": app.get(\"external_id\"),\n \"sub_bill_id\": app.get(\"sub_bill_id\"),\n \"periods\": app.get(\"periods\"),\n \"cycle\": app.get(\"cycle\") if app.get(\"cycle\") else cycle,\n \"ptp_bomber\": ptp_bomber,\n \"promised_date\": promised_date,\n \"follow_up_date\": follow_up_date,\n \"which_day\": which_day,\n \"overdue_days\": app.get(\"overdue_days\")\n }\n bomber_overdue_list.append(overdue_dict)\n try:\n if bomber_overdue_list:\n with db.atomic():\n for index in range(0, len(bomber_overdue_list), 1000):\n insert_list = bomber_overdue_list[index: index+1000]\n BomberOverdue.insert_many(insert_list).execute()\n except Exception as e:\n logging.info(\n \"summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s\"%(\n cycle,str(which_day),str(e)))\n\n# 每分钟对员工的下p件个数做个统计\n@action(MessageAction.BOMBER_PTP_REAL_TIME_SUMMARY)\ndef bomber_ptp_real_time_summary(payload, msg_id):\n ptp_switch_number = 200\n sys_ptp_switch = (SystemConfig.select()\n .where(SystemConfig.key == 'PTP_SWITCH_NUMBER')\n .first())\n if sys_ptp_switch and sys_ptp_switch.value.isdigit():\n ptp_switch_number = int(sys_ptp_switch.value)\n today = datetime.today().date()\n ptp_apps = (ApplicationR.select(fn.COUNT(ApplicationR.id).alias('ptp_cnt'),\n ApplicationR.latest_bomber)\n .where(ApplicationR.status != ApplicationStatus.REPAID.value,\n ApplicationR.cycle < Cycle.C2.value,\n ApplicationR.promised_date >= today,\n ApplicationR.latest_bomber.is_null(False))\n .group_by(ApplicationR.latest_bomber))\n\n bomber_ptps = (BomberPtp.select(BomberPtp.bomber_id))\n bomber_ptp_bids = [b.bomber_id for b in bomber_ptps]\n insert_result = []\n for app in ptp_apps:\n ptp_switch = BomberCallSwitch.ON.value\n if app.ptp_cnt >= ptp_switch_number:\n ptp_switch = 
BomberCallSwitch.OFF.value\n params = {\"bomber_id\": app.latest_bomber_id,\n \"ptp_cnt\": app.ptp_cnt,\n \"ptp_switch\": ptp_switch,\n \"auto_ext\": app.latest_bomber.auto_ext}\n if app.latest_bomber_id in bomber_ptp_bids:\n try:\n q = (BomberPtp.update(**params)\n .where(BomberPtp.bomber_id==app.latest_bomber_id)\n .execute())\n except Exception as e:\n logging.error(\"ptp_reil_time_summary_error:%s,data,bid:%s\" % (\n str(e),params,app.latest_bomber_id))\n else:\n insert_result.append(params)\n if insert_result:\n BomberPtp.insert_many(insert_result).execute()\n\n# 每天的10:00,14:00,16:30不让接自动外呼,员工把自动外呼的件跟进完,才能接自动外呼\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)\ndef today_ptp_auto_call_switch(payload, msg_id):\n today = datetime.today().date()\n next_day = today + timedelta(days=1)\n # 获取有今天p到期的件的催收员\n apps = (ApplicationR.select(ApplicationR.latest_bomber)\n .where(ApplicationR.promised_date < next_day,\n ApplicationR.promised_date >= today,\n ApplicationR.promised_date.is_null(False),\n ApplicationR.status != ApplicationStatus.REPAID.value,\n ApplicationR.cycle < Cycle.C2.value,\n ApplicationR.latest_bomber.is_null(False))\n .group_by(ApplicationR.latest_bomber))\n bids = [a.latest_bomber_id for a in apps]\n if not bids:\n return\n q = (BomberPtp.update(today_switch=BomberCallSwitch.OFF.value)\n .where(BomberPtp.auto_ext.is_null(False),\n BomberPtp.bomber_id << bids)\n .execute())\n\n# 每天早上8点定时刷新催收员自动外呼的状态\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)\ndef update_today_switch_every_day(payload, msg_id):\n q = (BomberPtp.update(today_switch=BomberCallSwitch.ON.value)\n .where(BomberPtp.auto_ext.is_null(False))\n .execute())\n\n# 用户修改电话通知bomber\n@action(MessageAction.USER_UPDATE_PHONE)\ndef user_change_phone(payload, msg_id):\n user_id = payload.get(\"user_id\")\n new_mobile_no = payload.get(\"new_mobile_no\")\n if not all([user_id, new_mobile_no]):\n logging.info(\"用户修改电话,没有获取到用户id获这用户手机号\")\n return\n source = 'applicant updated number'\n contacts = (Contact.select()\n .where(Contact.user_id == int(user_id)))\n if not contacts.exists():\n logging.info(\"用户在contact中没有记录\")\n return\n new_contact = contacts.where(Contact.number == new_mobile_no,\n Contact.source == source)\n if new_contact.exists():\n logging.info(\"用户手机号已存在\")\n return\n contact = contacts.order_by(-Contact.created_at).first()\n Contact.create(user_id=contact.user_id,\n name=contact.name,\n number = new_mobile_no,\n source = source,\n relationship = Relationship.APPLICANT.value,\n real_relationship = Relationship.APPLICANT.value)\n\n",
"step-ids": [
60,
69,
74,
80,
146
]
}
|
[
60,
69,
74,
80,
146
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
ACHAEA_ENDPOINT = 'https://api.achaea.com'
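# Decorator for API methods that only make sense after a successful login; raises APIError otherwise.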
def _requires_auth(func):
def wrapper(self, *args, **kwargs):
if self.auth is not True:
raise APIError()
return func(self, *args, **kwargs)
return wrapper
class API:
auth = None
CHECKAUTH_RESOURCE = '/checkauth.json'
CHARACTERS_RESOURCE = '/characters.json'
SPECIFIC_CHARACTER_RESOURCE = '/characters/{}.json'
NEWS_RESOURCE = '/news.json'
SPECIFIC_NEWS_RESOURCE = '/news/{}.json'
SPECIFIC_NEWS_POST_RESOURCE = '/news/{}/{}.json'
    def __init__(self, endpoint=ACHAEA_ENDPOINT, username=None, password=None):
        self.endpoint = endpoint
        self.username = username
        self.password = password
        if username is not None and password is not None:
            self.checkauth()
def _get_endpoint(self, fmt_str, args):
return self.endpoint + fmt_str.format(*args)
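    # Perform a GET against the given resource, merging character/password credentials into the query string for authenticated requests.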
def _make_request(self, resource, args=(), authed=False, params={}):
endpoint = self._get_endpoint(resource, args)
auth_params = {}
if authed:
if self.username is None or self.password is None:
raise APIError()
auth_params = {'character': self.username, 'password': self.password}
params = params.copy()
params.update(auth_params)
req = requests.get(endpoint, params=params)
return req
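    # Verify the stored credentials against /checkauth.json once and cache the boolean result in self.auth.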
def checkauth(self):
if self.auth is not None:
return self.auth
req = self._make_request(self.CHECKAUTH_RESOURCE, authed=True)
if req.status_code == 200:
self.auth = True
else:
self.auth = False
return self.auth
def characters(self):
req = self._make_request(self.CHARACTERS_RESOURCE)
if req.status_code != 200:
return None
result = req.json()
characters = []
for character in result['characters']:
characters.append(character['name'])
return characters
@_requires_auth
def _character_authed(self, character):
req = self._make_request(self.SPECIFIC_CHARACTER_RESOURCE, (character,), True)
if req.status_code != 200:
return None
result = req.json()
return Character.parse(result)
def _character_unauthed(self, character):
req = self._make_request(self.SPECIFIC_CHARACTER_RESOURCE, (character,), False)
if req.status_code != 200:
return None
result = req.json()
return Character.parse(result)
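    # Use the authenticated character endpoint when looking up our own character; fall back to the public endpoint otherwise.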
def character(self, character=None):
if self.auth is True and (self.username == character or character is None):
return self._character_authed(character or self.username)
else:
return self._character_unauthed(character)
def sections(self):
req = self._make_request(self.NEWS_RESOURCE, authed=self.auth)
if req.status_code != 200:
return None
result = req.json()
sections_list = map(NewsSection.parse, result)
return sections_list
def posts(self, section, page=None):
params = {}
if page is not None:
params['page'] = page
req = self._make_request(self.SPECIFIC_NEWS_RESOURCE, (section,), authed=self.auth,
params=params)
if req.status_code != 200:
return None
result = req.json()
return result
def post(self, section, number):
pass
class APIError(Exception):
pass
class Character:
def __init__(self, name, fullname, level, house, xp_rank, player_kills, mob_kills,
explorer_rank, current_class, messages_total=None, messages_unread=None):
self.name = name
self.fullname = fullname
self.level = level
self.house = house
self.xp_rank = xp_rank
self.player_kills = player_kills
self.mob_kills = mob_kills
self.explorer_rank = explorer_rank
self.current_class = current_class
self.messages_total = messages_total
self.messages_unread = messages_unread
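    # Build a Character instance from the raw JSON payload returned by the API.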
@staticmethod
def parse(json_data):
name = json_data['name']
fullname = json_data['fullname']
level = int(json_data['level'])
house = json_data['house']
xp_rank = json_data['xp_rank']
player_kills = int(json_data['player_kills'])
mob_kills = int(json_data['mob_kills'])
explorer_rank = int(json_data['explorer_rank'])
current_class = json_data['class']
messages_total = None
messages_unread = None
if 'messages_total' in json_data and 'messages_unread' in json_data:
messages_total = json_data['messages_total']
messages_unread = json_data['messages_unread']
return Character(name, fullname, level, house, xp_rank, player_kills, mob_kills,
explorer_rank, current_class, messages_total, messages_unread)
def __repr__(self):
return '<Character: {} ({})>'.format(self.name, self.fullname)
class NewsSection:
def __init__(self, name, read, total, unread):
self.name = name
self.read = read
self.total = total
self.unread = unread
@staticmethod
def parse(json_data):
name = json_data['name']
read = int(json_data['read'])
total = int(json_data['total'])
unread = int(json_data['unread'])
return NewsSection(name, read, total, unread)
def __repr__(self):
        return '<NewsSection: {} ({}/{} unread)>'.format(self.name, self.unread, self.total)
|
normal
|
{
"blob_id": "da66b254afb3a8fcd3783a38d8624caa917e58c3",
"index": 652,
"step-1": "<mask token>\n\n\nclass API:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def characters(self):\n req = self._make_request(self.CHARACTERS_RESOURCE)\n if req.status_code != 200:\n return None\n result = req.json()\n characters = []\n for character in result['characters']:\n characters.append(character['name'])\n return characters\n <mask token>\n\n def _character_unauthed(self, character):\n req = self._make_request(self.SPECIFIC_CHARACTER_RESOURCE, (\n character,), False)\n if req.status_code != 200:\n return None\n result = req.json()\n return Character.parse(result)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass APIError(Exception):\n pass\n\n\nclass Character:\n\n def __init__(self, name, fullname, level, house, xp_rank, player_kills,\n mob_kills, explorer_rank, current_class, messages_total=None,\n messages_unread=None):\n self.name = name\n self.fullname = fullname\n self.level = level\n self.house = house\n self.xp_rank = xp_rank\n self.player_kills = player_kills\n self.mob_kills = mob_kills\n self.explorer_rank = explorer_rank\n self.current_class = current_class\n self.messages_total = messages_total\n self.messages_unread = messages_unread\n\n @staticmethod\n def parse(json_data):\n name = json_data['name']\n fullname = json_data['fullname']\n level = int(json_data['level'])\n house = json_data['house']\n xp_rank = json_data['xp_rank']\n player_kills = int(json_data['player_kills'])\n mob_kills = int(json_data['mob_kills'])\n explorer_rank = int(json_data['explorer_rank'])\n current_class = json_data['class']\n messages_total = None\n messages_unread = None\n if 'messages_total' in json_data and 'messages_unread' in json_data:\n messages_total = json_data['messages_total']\n messages_unread = json_data['messages_unread']\n return Character(name, fullname, level, house, xp_rank,\n player_kills, mob_kills, explorer_rank, current_class,\n messages_total, messages_unread)\n\n def __repr__(self):\n return '<Character: {} ({})>'.format(self.name, self.fullname)\n\n\nclass NewsSection:\n\n def __init__(self, name, read, total, unread):\n self.name = name\n self.read = read\n self.total = total\n self.unread = unread\n\n @staticmethod\n def parse(json_data):\n name = json_data['name']\n read = int(json_data['read'])\n total = int(json_data['total'])\n unread = int(json_data['unread'])\n return NewsSection(name, read, total, unread)\n\n def __repr__(self):\n return '<NewsSection: {} ({}/{} unread)>'.format(self.name, self.\n read, self.total)\n",
"step-2": "<mask token>\n\n\nclass API:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, endpoint=ACHAEA_ENDPOINT, username=None, password=None):\n self.endpoint = endpoint\n if username is not None and password is not None:\n self.username = username\n self.password = password\n self.checkauth()\n <mask token>\n <mask token>\n <mask token>\n\n def characters(self):\n req = self._make_request(self.CHARACTERS_RESOURCE)\n if req.status_code != 200:\n return None\n result = req.json()\n characters = []\n for character in result['characters']:\n characters.append(character['name'])\n return characters\n\n @_requires_auth\n def _character_authed(self, character):\n req = self._make_request(self.SPECIFIC_CHARACTER_RESOURCE, (\n character,), True)\n if req.status_code != 200:\n return None\n result = req.json()\n return Character.parse(result)\n\n def _character_unauthed(self, character):\n req = self._make_request(self.SPECIFIC_CHARACTER_RESOURCE, (\n character,), False)\n if req.status_code != 200:\n return None\n result = req.json()\n return Character.parse(result)\n\n def character(self, character=None):\n if self.auth is True and (self.username == character or character is\n None):\n return self._character_authed(character or self.username)\n else:\n return self._character_unauthed(character)\n <mask token>\n <mask token>\n <mask token>\n\n\nclass APIError(Exception):\n pass\n\n\nclass Character:\n\n def __init__(self, name, fullname, level, house, xp_rank, player_kills,\n mob_kills, explorer_rank, current_class, messages_total=None,\n messages_unread=None):\n self.name = name\n self.fullname = fullname\n self.level = level\n self.house = house\n self.xp_rank = xp_rank\n self.player_kills = player_kills\n self.mob_kills = mob_kills\n self.explorer_rank = explorer_rank\n self.current_class = current_class\n self.messages_total = messages_total\n self.messages_unread = messages_unread\n\n @staticmethod\n def parse(json_data):\n name = json_data['name']\n fullname = json_data['fullname']\n level = int(json_data['level'])\n house = json_data['house']\n xp_rank = json_data['xp_rank']\n player_kills = int(json_data['player_kills'])\n mob_kills = int(json_data['mob_kills'])\n explorer_rank = int(json_data['explorer_rank'])\n current_class = json_data['class']\n messages_total = None\n messages_unread = None\n if 'messages_total' in json_data and 'messages_unread' in json_data:\n messages_total = json_data['messages_total']\n messages_unread = json_data['messages_unread']\n return Character(name, fullname, level, house, xp_rank,\n player_kills, mob_kills, explorer_rank, current_class,\n messages_total, messages_unread)\n\n def __repr__(self):\n return '<Character: {} ({})>'.format(self.name, self.fullname)\n\n\nclass NewsSection:\n\n def __init__(self, name, read, total, unread):\n self.name = name\n self.read = read\n self.total = total\n self.unread = unread\n\n @staticmethod\n def parse(json_data):\n name = json_data['name']\n read = int(json_data['read'])\n total = int(json_data['total'])\n unread = int(json_data['unread'])\n return NewsSection(name, read, total, unread)\n\n def __repr__(self):\n return '<NewsSection: {} ({}/{} unread)>'.format(self.name, self.\n read, self.total)\n",
"step-3": "<mask token>\n\n\nclass API:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, endpoint=ACHAEA_ENDPOINT, username=None, password=None):\n self.endpoint = endpoint\n if username is not None and password is not None:\n self.username = username\n self.password = password\n self.checkauth()\n\n def _get_endpoint(self, fmt_str, args):\n return self.endpoint + fmt_str.format(*args)\n\n def _make_request(self, resource, args=(), authed=False, params={}):\n endpoint = self._get_endpoint(resource, args)\n auth_params = {}\n if authed:\n if self.username is None or self.password is None:\n raise APIError()\n auth_params = {'character': self.username, 'password': self.\n password}\n params = params.copy()\n params.update(auth_params)\n req = requests.get(endpoint, params=params)\n return req\n\n def checkauth(self):\n if self.auth is not None:\n return self.auth\n req = self._make_request(self.CHECKAUTH_RESOURCE, authed=True)\n if req.status_code == 200:\n self.auth = True\n else:\n self.auth = False\n return self.auth\n\n def characters(self):\n req = self._make_request(self.CHARACTERS_RESOURCE)\n if req.status_code != 200:\n return None\n result = req.json()\n characters = []\n for character in result['characters']:\n characters.append(character['name'])\n return characters\n\n @_requires_auth\n def _character_authed(self, character):\n req = self._make_request(self.SPECIFIC_CHARACTER_RESOURCE, (\n character,), True)\n if req.status_code != 200:\n return None\n result = req.json()\n return Character.parse(result)\n\n def _character_unauthed(self, character):\n req = self._make_request(self.SPECIFIC_CHARACTER_RESOURCE, (\n character,), False)\n if req.status_code != 200:\n return None\n result = req.json()\n return Character.parse(result)\n\n def character(self, character=None):\n if self.auth is True and (self.username == character or character is\n None):\n return self._character_authed(character or self.username)\n else:\n return self._character_unauthed(character)\n\n def sections(self):\n req = self._make_request(self.NEWS_RESOURCE, authed=self.auth)\n if req.status_code != 200:\n return None\n result = req.json()\n sections_list = map(NewsSection.parse, result)\n return sections_list\n\n def posts(self, section, page=None):\n params = {}\n if page is not None:\n params['page'] = page\n req = self._make_request(self.SPECIFIC_NEWS_RESOURCE, (section,),\n authed=self.auth, params=params)\n if req.status_code != 200:\n return None\n result = req.json()\n return result\n <mask token>\n\n\nclass APIError(Exception):\n pass\n\n\nclass Character:\n\n def __init__(self, name, fullname, level, house, xp_rank, player_kills,\n mob_kills, explorer_rank, current_class, messages_total=None,\n messages_unread=None):\n self.name = name\n self.fullname = fullname\n self.level = level\n self.house = house\n self.xp_rank = xp_rank\n self.player_kills = player_kills\n self.mob_kills = mob_kills\n self.explorer_rank = explorer_rank\n self.current_class = current_class\n self.messages_total = messages_total\n self.messages_unread = messages_unread\n\n @staticmethod\n def parse(json_data):\n name = json_data['name']\n fullname = json_data['fullname']\n level = int(json_data['level'])\n house = json_data['house']\n xp_rank = json_data['xp_rank']\n player_kills = int(json_data['player_kills'])\n mob_kills = int(json_data['mob_kills'])\n explorer_rank = int(json_data['explorer_rank'])\n current_class = json_data['class']\n 
messages_total = None\n messages_unread = None\n if 'messages_total' in json_data and 'messages_unread' in json_data:\n messages_total = json_data['messages_total']\n messages_unread = json_data['messages_unread']\n return Character(name, fullname, level, house, xp_rank,\n player_kills, mob_kills, explorer_rank, current_class,\n messages_total, messages_unread)\n\n def __repr__(self):\n return '<Character: {} ({})>'.format(self.name, self.fullname)\n\n\nclass NewsSection:\n\n def __init__(self, name, read, total, unread):\n self.name = name\n self.read = read\n self.total = total\n self.unread = unread\n\n @staticmethod\n def parse(json_data):\n name = json_data['name']\n read = int(json_data['read'])\n total = int(json_data['total'])\n unread = int(json_data['unread'])\n return NewsSection(name, read, total, unread)\n\n def __repr__(self):\n return '<NewsSection: {} ({}/{} unread)>'.format(self.name, self.\n read, self.total)\n",
"step-4": "<mask token>\n\n\ndef _requires_auth(func):\n\n def wrapper(self, *args, **kwargs):\n if self.auth is not True:\n raise APIError()\n return func(self, *args, **kwargs)\n return wrapper\n\n\nclass API:\n auth = None\n CHECKAUTH_RESOURCE = '/checkauth.json'\n CHARACTERS_RESOURCE = '/characters.json'\n SPECIFIC_CHARACTER_RESOURCE = '/characters/{}.json'\n NEWS_RESOURCE = '/news.json'\n SPECIFIC_NEWS_RESOURCE = '/news/{}.json'\n SPECIFIC_NEWS_POST_RESOURCE = '/news/{}/{}.json'\n\n def __init__(self, endpoint=ACHAEA_ENDPOINT, username=None, password=None):\n self.endpoint = endpoint\n if username is not None and password is not None:\n self.username = username\n self.password = password\n self.checkauth()\n\n def _get_endpoint(self, fmt_str, args):\n return self.endpoint + fmt_str.format(*args)\n\n def _make_request(self, resource, args=(), authed=False, params={}):\n endpoint = self._get_endpoint(resource, args)\n auth_params = {}\n if authed:\n if self.username is None or self.password is None:\n raise APIError()\n auth_params = {'character': self.username, 'password': self.\n password}\n params = params.copy()\n params.update(auth_params)\n req = requests.get(endpoint, params=params)\n return req\n\n def checkauth(self):\n if self.auth is not None:\n return self.auth\n req = self._make_request(self.CHECKAUTH_RESOURCE, authed=True)\n if req.status_code == 200:\n self.auth = True\n else:\n self.auth = False\n return self.auth\n\n def characters(self):\n req = self._make_request(self.CHARACTERS_RESOURCE)\n if req.status_code != 200:\n return None\n result = req.json()\n characters = []\n for character in result['characters']:\n characters.append(character['name'])\n return characters\n\n @_requires_auth\n def _character_authed(self, character):\n req = self._make_request(self.SPECIFIC_CHARACTER_RESOURCE, (\n character,), True)\n if req.status_code != 200:\n return None\n result = req.json()\n return Character.parse(result)\n\n def _character_unauthed(self, character):\n req = self._make_request(self.SPECIFIC_CHARACTER_RESOURCE, (\n character,), False)\n if req.status_code != 200:\n return None\n result = req.json()\n return Character.parse(result)\n\n def character(self, character=None):\n if self.auth is True and (self.username == character or character is\n None):\n return self._character_authed(character or self.username)\n else:\n return self._character_unauthed(character)\n\n def sections(self):\n req = self._make_request(self.NEWS_RESOURCE, authed=self.auth)\n if req.status_code != 200:\n return None\n result = req.json()\n sections_list = map(NewsSection.parse, result)\n return sections_list\n\n def posts(self, section, page=None):\n params = {}\n if page is not None:\n params['page'] = page\n req = self._make_request(self.SPECIFIC_NEWS_RESOURCE, (section,),\n authed=self.auth, params=params)\n if req.status_code != 200:\n return None\n result = req.json()\n return result\n\n def post(self, section, number):\n pass\n\n\nclass APIError(Exception):\n pass\n\n\nclass Character:\n\n def __init__(self, name, fullname, level, house, xp_rank, player_kills,\n mob_kills, explorer_rank, current_class, messages_total=None,\n messages_unread=None):\n self.name = name\n self.fullname = fullname\n self.level = level\n self.house = house\n self.xp_rank = xp_rank\n self.player_kills = player_kills\n self.mob_kills = mob_kills\n self.explorer_rank = explorer_rank\n self.current_class = current_class\n self.messages_total = messages_total\n self.messages_unread = messages_unread\n\n 
@staticmethod\n def parse(json_data):\n name = json_data['name']\n fullname = json_data['fullname']\n level = int(json_data['level'])\n house = json_data['house']\n xp_rank = json_data['xp_rank']\n player_kills = int(json_data['player_kills'])\n mob_kills = int(json_data['mob_kills'])\n explorer_rank = int(json_data['explorer_rank'])\n current_class = json_data['class']\n messages_total = None\n messages_unread = None\n if 'messages_total' in json_data and 'messages_unread' in json_data:\n messages_total = json_data['messages_total']\n messages_unread = json_data['messages_unread']\n return Character(name, fullname, level, house, xp_rank,\n player_kills, mob_kills, explorer_rank, current_class,\n messages_total, messages_unread)\n\n def __repr__(self):\n return '<Character: {} ({})>'.format(self.name, self.fullname)\n\n\nclass NewsSection:\n\n def __init__(self, name, read, total, unread):\n self.name = name\n self.read = read\n self.total = total\n self.unread = unread\n\n @staticmethod\n def parse(json_data):\n name = json_data['name']\n read = int(json_data['read'])\n total = int(json_data['total'])\n unread = int(json_data['unread'])\n return NewsSection(name, read, total, unread)\n\n def __repr__(self):\n return '<NewsSection: {} ({}/{} unread)>'.format(self.name, self.\n read, self.total)\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport requests\n\nACHAEA_ENDPOINT = 'https://api.achaea.com'\n\n\ndef _requires_auth(func):\n def wrapper(self, *args, **kwargs):\n if self.auth is not True:\n raise APIError()\n return func(self, *args, **kwargs)\n return wrapper\n\n\nclass API:\n\n auth = None\n\n CHECKAUTH_RESOURCE = '/checkauth.json'\n CHARACTERS_RESOURCE = '/characters.json'\n SPECIFIC_CHARACTER_RESOURCE = '/characters/{}.json'\n NEWS_RESOURCE = '/news.json'\n SPECIFIC_NEWS_RESOURCE = '/news/{}.json'\n SPECIFIC_NEWS_POST_RESOURCE = '/news/{}/{}.json'\n\n def __init__(self, endpoint=ACHAEA_ENDPOINT, username=None, password=None):\n self.endpoint = endpoint\n if username is not None and password is not None:\n self.username = username\n self.password = password\n self.checkauth()\n\n def _get_endpoint(self, fmt_str, args):\n return self.endpoint + fmt_str.format(*args)\n\n def _make_request(self, resource, args=(), authed=False, params={}):\n endpoint = self._get_endpoint(resource, args)\n auth_params = {}\n if authed:\n if self.username is None or self.password is None:\n raise APIError()\n auth_params = {'character': self.username, 'password': self.password}\n params = params.copy()\n params.update(auth_params)\n req = requests.get(endpoint, params=params)\n return req\n\n def checkauth(self):\n if self.auth is not None:\n return self.auth\n\n req = self._make_request(self.CHECKAUTH_RESOURCE, authed=True)\n\n if req.status_code == 200:\n self.auth = True\n else:\n self.auth = False\n\n return self.auth\n\n def characters(self):\n req = self._make_request(self.CHARACTERS_RESOURCE)\n if req.status_code != 200:\n return None\n\n result = req.json()\n characters = []\n for character in result['characters']:\n characters.append(character['name'])\n return characters\n\n @_requires_auth\n def _character_authed(self, character):\n req = self._make_request(self.SPECIFIC_CHARACTER_RESOURCE, (character,), True)\n if req.status_code != 200:\n return None\n\n result = req.json()\n return Character.parse(result)\n\n def _character_unauthed(self, character):\n req = self._make_request(self.SPECIFIC_CHARACTER_RESOURCE, (character,), False)\n if req.status_code != 200:\n return None\n\n result = req.json()\n return Character.parse(result)\n\n def character(self, character=None):\n if self.auth is True and (self.username == character or character is None):\n return self._character_authed(character or self.username)\n else:\n return self._character_unauthed(character)\n\n def sections(self):\n req = self._make_request(self.NEWS_RESOURCE, authed=self.auth)\n if req.status_code != 200:\n return None\n\n result = req.json()\n sections_list = map(NewsSection.parse, result)\n return sections_list\n\n def posts(self, section, page=None):\n params = {}\n if page is not None:\n params['page'] = page\n req = self._make_request(self.SPECIFIC_NEWS_RESOURCE, (section,), authed=self.auth,\n params=params)\n if req.status_code != 200:\n return None\n\n result = req.json()\n return result\n\n def post(self, section, number):\n pass\n\n\nclass APIError(Exception):\n pass\n\n\nclass Character:\n\n def __init__(self, name, fullname, level, house, xp_rank, player_kills, mob_kills,\n explorer_rank, current_class, messages_total=None, messages_unread=None):\n self.name = name\n self.fullname = fullname\n self.level = level\n self.house = house\n self.xp_rank = xp_rank\n self.player_kills = player_kills\n self.mob_kills = mob_kills\n self.explorer_rank = explorer_rank\n self.current_class = 
current_class\n self.messages_total = messages_total\n self.messages_unread = messages_unread\n\n @staticmethod\n def parse(json_data):\n name = json_data['name']\n fullname = json_data['fullname']\n level = int(json_data['level'])\n house = json_data['house']\n xp_rank = json_data['xp_rank']\n player_kills = int(json_data['player_kills'])\n mob_kills = int(json_data['mob_kills'])\n explorer_rank = int(json_data['explorer_rank'])\n current_class = json_data['class']\n messages_total = None\n messages_unread = None\n if 'messages_total' in json_data and 'messages_unread' in json_data:\n messages_total = json_data['messages_total']\n messages_unread = json_data['messages_unread']\n\n return Character(name, fullname, level, house, xp_rank, player_kills, mob_kills,\n explorer_rank, current_class, messages_total, messages_unread)\n\n def __repr__(self):\n return '<Character: {} ({})>'.format(self.name, self.fullname)\n\n\nclass NewsSection:\n\n def __init__(self, name, read, total, unread):\n self.name = name\n self.read = read\n self.total = total\n self.unread = unread\n\n @staticmethod\n def parse(json_data):\n name = json_data['name']\n read = int(json_data['read'])\n total = int(json_data['total'])\n unread = int(json_data['unread'])\n return NewsSection(name, read, total, unread)\n\n def __repr__(self):\n return '<NewsSection: {} ({}/{} unread)>'.format(self.name, self.read, self.total)\n",
"step-ids": [
12,
15,
20,
23,
26
]
}
|
[
12,
15,
20,
23,
26
] |
import scraperwiki, lxml.html, urllib2, re
from datetime import datetime
#html = scraperwiki.scrape("http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm")
doc = lxml.html.parse(urllib2.urlopen("http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm"))
root = doc.getroot()
#select the table that contains the offenders, ignoring the first one that contains the header row
for tr in root.xpath("//div[@id='verdiSection10']/div/div/table/tbody/tr")[1:]:
data = {
'conviction_date': datetime.strptime(
re.match("(\d+/\d+/\d+)", tr[0].text_content().strip()).group(1),
"%d/%m/%Y"), #sometimes they include two dates in the entry, so we'll have to grab the first (damnit)
'business_name': tr[1].text_content().strip(),
'business_address': tr[2].text_content().strip(),
'convicted_name': tr[3].text_content().strip(),
'agency': tr[4].text_content().strip(),
'pdf': tr[5].xpath(".//a")[0].get("href")
}
scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)
import scraperwiki, lxml.html, urllib2, re
from datetime import datetime
#html = scraperwiki.scrape("http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm")
doc = lxml.html.parse(urllib2.urlopen("http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm"))
root = doc.getroot()
#select the table that contains the offenders, ignoring the first one that contains the header row
for tr in root.xpath("//div[@id='verdiSection10']/div/div/table/tbody/tr")[1:]:
data = {
'conviction_date': datetime.strptime(
re.match("(\d+/\d+/\d+)", tr[0].text_content().strip()).group(1),
"%d/%m/%Y"), #sometimes they include two dates in the entry, so we'll have to grab the first (damnit)
'business_name': tr[1].text_content().strip(),
'business_address': tr[2].text_content().strip(),
'convicted_name': tr[3].text_content().strip(),
'agency': tr[4].text_content().strip(),
'pdf': tr[5].xpath(".//a")[0].get("href")
}
scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)
|
normal
|
{
"blob_id": "e870900249b121f2416d7be543752ebf6392b6be",
"index": 6868,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor tr in root.xpath(\"//div[@id='verdiSection10']/div/div/table/tbody/tr\")[1:]:\n data = {'conviction_date': datetime.strptime(re.match(\n '(\\\\d+/\\\\d+/\\\\d+)', tr[0].text_content().strip()).group(1),\n '%d/%m/%Y'), 'business_name': tr[1].text_content().strip(),\n 'business_address': tr[2].text_content().strip(), 'convicted_name':\n tr[3].text_content().strip(), 'agency': tr[4].text_content().strip(\n ), 'pdf': tr[5].xpath('.//a')[0].get('href')}\n scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)\n<mask token>\nfor tr in root.xpath(\"//div[@id='verdiSection10']/div/div/table/tbody/tr\")[1:]:\n data = {'conviction_date': datetime.strptime(re.match(\n '(\\\\d+/\\\\d+/\\\\d+)', tr[0].text_content().strip()).group(1),\n '%d/%m/%Y'), 'business_name': tr[1].text_content().strip(),\n 'business_address': tr[2].text_content().strip(), 'convicted_name':\n tr[3].text_content().strip(), 'agency': tr[4].text_content().strip(\n ), 'pdf': tr[5].xpath('.//a')[0].get('href')}\n scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)\n",
"step-3": "<mask token>\ndoc = lxml.html.parse(urllib2.urlopen(\n 'http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm'\n ))\nroot = doc.getroot()\nfor tr in root.xpath(\"//div[@id='verdiSection10']/div/div/table/tbody/tr\")[1:]:\n data = {'conviction_date': datetime.strptime(re.match(\n '(\\\\d+/\\\\d+/\\\\d+)', tr[0].text_content().strip()).group(1),\n '%d/%m/%Y'), 'business_name': tr[1].text_content().strip(),\n 'business_address': tr[2].text_content().strip(), 'convicted_name':\n tr[3].text_content().strip(), 'agency': tr[4].text_content().strip(\n ), 'pdf': tr[5].xpath('.//a')[0].get('href')}\n scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)\n<mask token>\ndoc = lxml.html.parse(urllib2.urlopen(\n 'http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm'\n ))\nroot = doc.getroot()\nfor tr in root.xpath(\"//div[@id='verdiSection10']/div/div/table/tbody/tr\")[1:]:\n data = {'conviction_date': datetime.strptime(re.match(\n '(\\\\d+/\\\\d+/\\\\d+)', tr[0].text_content().strip()).group(1),\n '%d/%m/%Y'), 'business_name': tr[1].text_content().strip(),\n 'business_address': tr[2].text_content().strip(), 'convicted_name':\n tr[3].text_content().strip(), 'agency': tr[4].text_content().strip(\n ), 'pdf': tr[5].xpath('.//a')[0].get('href')}\n scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)\n",
"step-4": "import scraperwiki, lxml.html, urllib2, re\nfrom datetime import datetime\ndoc = lxml.html.parse(urllib2.urlopen(\n 'http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm'\n ))\nroot = doc.getroot()\nfor tr in root.xpath(\"//div[@id='verdiSection10']/div/div/table/tbody/tr\")[1:]:\n data = {'conviction_date': datetime.strptime(re.match(\n '(\\\\d+/\\\\d+/\\\\d+)', tr[0].text_content().strip()).group(1),\n '%d/%m/%Y'), 'business_name': tr[1].text_content().strip(),\n 'business_address': tr[2].text_content().strip(), 'convicted_name':\n tr[3].text_content().strip(), 'agency': tr[4].text_content().strip(\n ), 'pdf': tr[5].xpath('.//a')[0].get('href')}\n scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)\nimport scraperwiki, lxml.html, urllib2, re\nfrom datetime import datetime\ndoc = lxml.html.parse(urllib2.urlopen(\n 'http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm'\n ))\nroot = doc.getroot()\nfor tr in root.xpath(\"//div[@id='verdiSection10']/div/div/table/tbody/tr\")[1:]:\n data = {'conviction_date': datetime.strptime(re.match(\n '(\\\\d+/\\\\d+/\\\\d+)', tr[0].text_content().strip()).group(1),\n '%d/%m/%Y'), 'business_name': tr[1].text_content().strip(),\n 'business_address': tr[2].text_content().strip(), 'convicted_name':\n tr[3].text_content().strip(), 'agency': tr[4].text_content().strip(\n ), 'pdf': tr[5].xpath('.//a')[0].get('href')}\n scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)\n",
"step-5": "import scraperwiki, lxml.html, urllib2, re\nfrom datetime import datetime\n\n#html = scraperwiki.scrape(\"http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm\")\ndoc = lxml.html.parse(urllib2.urlopen(\"http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm\"))\nroot = doc.getroot()\n\n#select the table that contains the offenders, ignoring the first one that contains the header row\nfor tr in root.xpath(\"//div[@id='verdiSection10']/div/div/table/tbody/tr\")[1:]:\n data = {\n 'conviction_date': datetime.strptime(\n re.match(\"(\\d+/\\d+/\\d+)\", tr[0].text_content().strip()).group(1),\n \"%d/%m/%Y\"), #sometimes they include two dates in the entry, so we'll have to grab the first (damnit)\n 'business_name': tr[1].text_content().strip(),\n 'business_address': tr[2].text_content().strip(),\n 'convicted_name': tr[3].text_content().strip(),\n 'agency': tr[4].text_content().strip(),\n 'pdf': tr[5].xpath(\".//a\")[0].get(\"href\")\n }\n \n scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)\nimport scraperwiki, lxml.html, urllib2, re\nfrom datetime import datetime\n\n#html = scraperwiki.scrape(\"http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm\")\ndoc = lxml.html.parse(urllib2.urlopen(\"http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm\"))\nroot = doc.getroot()\n\n#select the table that contains the offenders, ignoring the first one that contains the header row\nfor tr in root.xpath(\"//div[@id='verdiSection10']/div/div/table/tbody/tr\")[1:]:\n data = {\n 'conviction_date': datetime.strptime(\n re.match(\"(\\d+/\\d+/\\d+)\", tr[0].text_content().strip()).group(1),\n \"%d/%m/%Y\"), #sometimes they include two dates in the entry, so we'll have to grab the first (damnit)\n 'business_name': tr[1].text_content().strip(),\n 'business_address': tr[2].text_content().strip(),\n 'convicted_name': tr[3].text_content().strip(),\n 'agency': tr[4].text_content().strip(),\n 'pdf': tr[5].xpath(\".//a\")[0].get(\"href\")\n }\n \n scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def sleep(min_seconds=1, max_seconds=10):
"""Allow a browser instance to wait for a few seconds before do something"""
time.sleep(randint(min_seconds, max_seconds))
def click(elem):
try:
elem.click()
except ElementNotInteractableException:
pass
def open_website(url):
"""
    Open the website at the target URL
"""
browser = webdriver.Firefox()
browser.get(url)
return browser
def is_finished(browser):
finish_text = '无符合条件的数据...'
sleep_secs = 15
time.sleep(sleep_secs)
result_list = browser.find_element_by_id('resultList')
if finish_text in result_list.text:
logger.info('Try refresh to reload content')
browser.refresh()
time.sleep(sleep_secs)
result_list = browser.find_element_by_id('resultList')
if finish_text in result_list.text:
return True
return False
<|reserved_special_token_0|>
@retry(times=5, delay=5, allowed_exceptions=IndexError)
def save_doc(browser, doc_elem, save_dir):
doc_key = doc_elem.get_attribute('key')
doc_title = doc_elem.get_attribute('title')
logger.info('Found document %s.' % doc_title)
unzipped_id = browser.execute_script('return unzip("%s")' % doc_key)
doc_id = browser.execute_script('return com.str.Decrypt("%s")' %
unzipped_id)
doc_link = DOC_LINK_BASE % doc_id
headers = {'User-Agent':
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'
}
p = re.compile('(var jsonHtmlData = ")(.+)(\\"}";)')
resp = requests.get(doc_link, headers=headers)
resp_text = resp.text
resp_obj = p.findall(resp_text)[0][1].replace('\\', '') + '"}'
resp_obj = json.loads(resp_obj)
os.makedirs(save_dir, exist_ok=True)
with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:
f.write(resp_obj['Html'])
logger.info('Downloaded %s.' % resp_obj['Title'])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def sleep(min_seconds=1, max_seconds=10):
"""Allow a browser instance to wait for a few seconds before do something"""
time.sleep(randint(min_seconds, max_seconds))
def click(elem):
try:
elem.click()
except ElementNotInteractableException:
pass
def open_website(url):
"""
    Open the website at the target URL
"""
browser = webdriver.Firefox()
browser.get(url)
return browser
def is_finished(browser):
finish_text = '无符合条件的数据...'
sleep_secs = 15
time.sleep(sleep_secs)
result_list = browser.find_element_by_id('resultList')
if finish_text in result_list.text:
logger.info('Try refresh to reload content')
browser.refresh()
time.sleep(sleep_secs)
result_list = browser.find_element_by_id('resultList')
if finish_text in result_list.text:
return True
return False
def download_docs(browser, save_dir='./', click_next_page=False):
if click_next_page:
next_page = browser.find_elements(By.XPATH,
'//*[@id="pageNumber"]/a[contains(text(), "下一页")]')
next_page[0].click()
if is_finished(browser):
logger.info('Finished downloading documents in this page.')
return
link_xpath = '//*[@class="dataItem"]'
keywords_elems = browser.find_elements(By.XPATH,
'//*[@class="contentCondtion"]')
subfolder = '-'.join([el.text for el in keywords_elems])
elems = browser.find_elements(By.XPATH, link_xpath)
for el in elems:
save_doc(browser, el, os.path.join(save_dir, subfolder))
time.sleep(1)
download_docs(browser, save_dir, click_next_page=True)
@retry(times=5, delay=5, allowed_exceptions=IndexError)
def save_doc(browser, doc_elem, save_dir):
doc_key = doc_elem.get_attribute('key')
doc_title = doc_elem.get_attribute('title')
logger.info('Found document %s.' % doc_title)
unzipped_id = browser.execute_script('return unzip("%s")' % doc_key)
doc_id = browser.execute_script('return com.str.Decrypt("%s")' %
unzipped_id)
doc_link = DOC_LINK_BASE % doc_id
headers = {'User-Agent':
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'
}
p = re.compile('(var jsonHtmlData = ")(.+)(\\"}";)')
resp = requests.get(doc_link, headers=headers)
resp_text = resp.text
resp_obj = p.findall(resp_text)[0][1].replace('\\', '') + '"}'
resp_obj = json.loads(resp_obj)
os.makedirs(save_dir, exist_ok=True)
with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:
f.write(resp_obj['Html'])
logger.info('Downloaded %s.' % resp_obj['Title'])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logger = get_logger(__name__)
def sleep(min_seconds=1, max_seconds=10):
"""Allow a browser instance to wait for a few seconds before do something"""
time.sleep(randint(min_seconds, max_seconds))
def click(elem):
try:
elem.click()
except ElementNotInteractableException:
pass
def open_website(url):
"""
    Open the website at the target URL
"""
browser = webdriver.Firefox()
browser.get(url)
return browser
def is_finished(browser):
finish_text = '无符合条件的数据...'
sleep_secs = 15
time.sleep(sleep_secs)
result_list = browser.find_element_by_id('resultList')
if finish_text in result_list.text:
logger.info('Try refresh to reload content')
browser.refresh()
time.sleep(sleep_secs)
result_list = browser.find_element_by_id('resultList')
if finish_text in result_list.text:
return True
return False
def download_docs(browser, save_dir='./', click_next_page=False):
if click_next_page:
next_page = browser.find_elements(By.XPATH,
'//*[@id="pageNumber"]/a[contains(text(), "下一页")]')
next_page[0].click()
if is_finished(browser):
logger.info('Finished downloading documents in this page.')
return
link_xpath = '//*[@class="dataItem"]'
keywords_elems = browser.find_elements(By.XPATH,
'//*[@class="contentCondtion"]')
subfolder = '-'.join([el.text for el in keywords_elems])
elems = browser.find_elements(By.XPATH, link_xpath)
for el in elems:
save_doc(browser, el, os.path.join(save_dir, subfolder))
time.sleep(1)
download_docs(browser, save_dir, click_next_page=True)
@retry(times=5, delay=5, allowed_exceptions=IndexError)
def save_doc(browser, doc_elem, save_dir):
doc_key = doc_elem.get_attribute('key')
doc_title = doc_elem.get_attribute('title')
logger.info('Found document %s.' % doc_title)
unzipped_id = browser.execute_script('return unzip("%s")' % doc_key)
doc_id = browser.execute_script('return com.str.Decrypt("%s")' %
unzipped_id)
doc_link = DOC_LINK_BASE % doc_id
headers = {'User-Agent':
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'
}
p = re.compile('(var jsonHtmlData = ")(.+)(\\"}";)')
resp = requests.get(doc_link, headers=headers)
resp_text = resp.text
resp_obj = p.findall(resp_text)[0][1].replace('\\', '') + '"}'
resp_obj = json.loads(resp_obj)
os.makedirs(save_dir, exist_ok=True)
with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:
f.write(resp_obj['Html'])
logger.info('Downloaded %s.' % resp_obj['Title'])
<|reserved_special_token_1|>
import time
import itertools
import re
import requests
import json
import os
from random import randint
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.common.exceptions import ElementNotInteractableException
from .exceptions import UnsupportedPlatformException
from .config import get_logger, DOC_LINK_BASE
from .utils import retry
logger = get_logger(__name__)
def sleep(min_seconds=1, max_seconds=10):
"""Allow a browser instance to wait for a few seconds before do something"""
time.sleep(randint(min_seconds, max_seconds))
def click(elem):
try:
elem.click()
except ElementNotInteractableException:
pass
def open_website(url):
"""
    Open the website at the target URL
"""
browser = webdriver.Firefox()
browser.get(url)
return browser
def is_finished(browser):
finish_text = '无符合条件的数据...'
sleep_secs = 15
time.sleep(sleep_secs)
result_list = browser.find_element_by_id('resultList')
if finish_text in result_list.text:
logger.info('Try refresh to reload content')
browser.refresh()
time.sleep(sleep_secs)
result_list = browser.find_element_by_id('resultList')
if finish_text in result_list.text:
return True
return False
def download_docs(browser, save_dir='./', click_next_page=False):
if click_next_page:
next_page = browser.find_elements(By.XPATH,
'//*[@id="pageNumber"]/a[contains(text(), "下一页")]')
next_page[0].click()
if is_finished(browser):
logger.info('Finished downloading documents in this page.')
return
link_xpath = '//*[@class="dataItem"]'
keywords_elems = browser.find_elements(By.XPATH,
'//*[@class="contentCondtion"]')
subfolder = '-'.join([el.text for el in keywords_elems])
elems = browser.find_elements(By.XPATH, link_xpath)
for el in elems:
save_doc(browser, el, os.path.join(save_dir, subfolder))
time.sleep(1)
download_docs(browser, save_dir, click_next_page=True)
@retry(times=5, delay=5, allowed_exceptions=IndexError)
def save_doc(browser, doc_elem, save_dir):
doc_key = doc_elem.get_attribute('key')
doc_title = doc_elem.get_attribute('title')
logger.info('Found document %s.' % doc_title)
unzipped_id = browser.execute_script('return unzip("%s")' % doc_key)
doc_id = browser.execute_script('return com.str.Decrypt("%s")' %
unzipped_id)
doc_link = DOC_LINK_BASE % doc_id
headers = {'User-Agent':
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'
}
p = re.compile('(var jsonHtmlData = ")(.+)(\\"}";)')
resp = requests.get(doc_link, headers=headers)
resp_text = resp.text
resp_obj = p.findall(resp_text)[0][1].replace('\\', '') + '"}'
resp_obj = json.loads(resp_obj)
os.makedirs(save_dir, exist_ok=True)
with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:
f.write(resp_obj['Html'])
logger.info('Downloaded %s.' % resp_obj['Title'])
<|reserved_special_token_1|>
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: wenshu/actions.py
# Author: Carolusian <https://github.com/carolusian>
# Date: 22.09.2018
# Last Modified Date: 22.09.2018
#
# Copyright 2018 Carolusian
import time
import itertools
import re
import requests
import json
import os
from random import randint
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.common.exceptions import ElementNotInteractableException
from .exceptions import UnsupportedPlatformException
from .config import get_logger, DOC_LINK_BASE
from .utils import retry
logger = get_logger(__name__)
def sleep(min_seconds=1, max_seconds=10):
"""Allow a browser instance to wait for a few seconds before do something"""
time.sleep(randint(min_seconds, max_seconds))
def click(elem):
try:
elem.click()
except ElementNotInteractableException:
pass
def open_website(url):
"""
    Open the website at the target URL
"""
browser = webdriver.Firefox()
browser.get(url)
return browser
def is_finished(browser):
finish_text = '无符合条件的数据...'
sleep_secs = 15
time.sleep(sleep_secs)
result_list = browser.find_element_by_id('resultList')
# Refresh if no result found
if finish_text in result_list.text:
logger.info('Try refresh to reload content')
browser.refresh()
time.sleep(sleep_secs)
        # If there is still no result after the refresh, downloading is finished
result_list = browser.find_element_by_id('resultList')
if finish_text in result_list.text:
return True
return False
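# Download every document on the current result page, then click "下一页" and recurse until is_finished() reports no more data.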
def download_docs(browser, save_dir='./', click_next_page=False):
if click_next_page:
next_page = browser.find_elements(By.XPATH, '//*[@id="pageNumber"]/a[contains(text(), "下一页")]')
next_page[0].click()
if is_finished(browser):
logger.info('Finished downloading documents in this page.')
return
link_xpath = '//*[@class="dataItem"]'
keywords_elems = browser.find_elements(By.XPATH, '//*[@class="contentCondtion"]')
subfolder = '-'.join([el.text for el in keywords_elems])
elems = browser.find_elements(By.XPATH, link_xpath)
for el in elems:
save_doc(browser, el, os.path.join(save_dir, subfolder))
time.sleep(1)
    # Go to the next page once this page has been downloaded
download_docs(browser, save_dir, click_next_page=True)
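# Decrypt the document id with the page's own unzip/Decrypt JS helpers, fetch the document page, extract the embedded jsonHtmlData payload with a regex and write its Html field to disk.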
@retry(times=5, delay=5, allowed_exceptions=IndexError)
def save_doc(browser, doc_elem, save_dir):
doc_key = doc_elem.get_attribute('key')
doc_title = doc_elem.get_attribute('title')
logger.info('Found document %s.' % doc_title)
unzipped_id = browser.execute_script('return unzip("%s")' % doc_key)
doc_id = browser.execute_script('return com.str.Decrypt("%s")' % unzipped_id)
doc_link = DOC_LINK_BASE % doc_id
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
p = re.compile('(var jsonHtmlData = ")(.+)(\\"}";)')
resp = requests.get(doc_link, headers=headers)
resp_text = resp.text
resp_obj = p.findall(resp_text)[0][1].replace('\\', '') + '"}'
resp_obj = json.loads(resp_obj)
os.makedirs(save_dir, exist_ok=True)
with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:
f.write(resp_obj['Html'])
logger.info('Downloaded %s.' % resp_obj['Title'])
|
flexible
|
{
"blob_id": "01de85b0d480c105c8cc1a8154c3de936ab3226d",
"index": 9143,
"step-1": "<mask token>\n\n\ndef sleep(min_seconds=1, max_seconds=10):\n \"\"\"Allow a browser instance to wait for a few seconds before do something\"\"\"\n time.sleep(randint(min_seconds, max_seconds))\n\n\ndef click(elem):\n try:\n elem.click()\n except ElementNotInteractableException:\n pass\n\n\ndef open_website(url):\n \"\"\"\n Open website of target url\n \"\"\"\n browser = webdriver.Firefox()\n browser.get(url)\n return browser\n\n\ndef is_finished(browser):\n finish_text = '无符合条件的数据...'\n sleep_secs = 15\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n logger.info('Try refresh to reload content')\n browser.refresh()\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n return True\n return False\n\n\n<mask token>\n\n\n@retry(times=5, delay=5, allowed_exceptions=IndexError)\ndef save_doc(browser, doc_elem, save_dir):\n doc_key = doc_elem.get_attribute('key')\n doc_title = doc_elem.get_attribute('title')\n logger.info('Found document %s.' % doc_title)\n unzipped_id = browser.execute_script('return unzip(\"%s\")' % doc_key)\n doc_id = browser.execute_script('return com.str.Decrypt(\"%s\")' %\n unzipped_id)\n doc_link = DOC_LINK_BASE % doc_id\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'\n }\n p = re.compile('(var jsonHtmlData = \")(.+)(\\\\\"}\";)')\n resp = requests.get(doc_link, headers=headers)\n resp_text = resp.text\n resp_obj = p.findall(resp_text)[0][1].replace('\\\\', '') + '\"}'\n resp_obj = json.loads(resp_obj)\n os.makedirs(save_dir, exist_ok=True)\n with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:\n f.write(resp_obj['Html'])\n logger.info('Downloaded %s.' % resp_obj['Title'])\n",
"step-2": "<mask token>\n\n\ndef sleep(min_seconds=1, max_seconds=10):\n \"\"\"Allow a browser instance to wait for a few seconds before do something\"\"\"\n time.sleep(randint(min_seconds, max_seconds))\n\n\ndef click(elem):\n try:\n elem.click()\n except ElementNotInteractableException:\n pass\n\n\ndef open_website(url):\n \"\"\"\n Open website of target url\n \"\"\"\n browser = webdriver.Firefox()\n browser.get(url)\n return browser\n\n\ndef is_finished(browser):\n finish_text = '无符合条件的数据...'\n sleep_secs = 15\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n logger.info('Try refresh to reload content')\n browser.refresh()\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n return True\n return False\n\n\ndef download_docs(browser, save_dir='./', click_next_page=False):\n if click_next_page:\n next_page = browser.find_elements(By.XPATH,\n '//*[@id=\"pageNumber\"]/a[contains(text(), \"下一页\")]')\n next_page[0].click()\n if is_finished(browser):\n logger.info('Finished downloading documents in this page.')\n return\n link_xpath = '//*[@class=\"dataItem\"]'\n keywords_elems = browser.find_elements(By.XPATH,\n '//*[@class=\"contentCondtion\"]')\n subfolder = '-'.join([el.text for el in keywords_elems])\n elems = browser.find_elements(By.XPATH, link_xpath)\n for el in elems:\n save_doc(browser, el, os.path.join(save_dir, subfolder))\n time.sleep(1)\n download_docs(browser, save_dir, click_next_page=True)\n\n\n@retry(times=5, delay=5, allowed_exceptions=IndexError)\ndef save_doc(browser, doc_elem, save_dir):\n doc_key = doc_elem.get_attribute('key')\n doc_title = doc_elem.get_attribute('title')\n logger.info('Found document %s.' % doc_title)\n unzipped_id = browser.execute_script('return unzip(\"%s\")' % doc_key)\n doc_id = browser.execute_script('return com.str.Decrypt(\"%s\")' %\n unzipped_id)\n doc_link = DOC_LINK_BASE % doc_id\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'\n }\n p = re.compile('(var jsonHtmlData = \")(.+)(\\\\\"}\";)')\n resp = requests.get(doc_link, headers=headers)\n resp_text = resp.text\n resp_obj = p.findall(resp_text)[0][1].replace('\\\\', '') + '\"}'\n resp_obj = json.loads(resp_obj)\n os.makedirs(save_dir, exist_ok=True)\n with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:\n f.write(resp_obj['Html'])\n logger.info('Downloaded %s.' % resp_obj['Title'])\n",
"step-3": "<mask token>\nlogger = get_logger(__name__)\n\n\ndef sleep(min_seconds=1, max_seconds=10):\n \"\"\"Allow a browser instance to wait for a few seconds before do something\"\"\"\n time.sleep(randint(min_seconds, max_seconds))\n\n\ndef click(elem):\n try:\n elem.click()\n except ElementNotInteractableException:\n pass\n\n\ndef open_website(url):\n \"\"\"\n Open website of target url\n \"\"\"\n browser = webdriver.Firefox()\n browser.get(url)\n return browser\n\n\ndef is_finished(browser):\n finish_text = '无符合条件的数据...'\n sleep_secs = 15\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n logger.info('Try refresh to reload content')\n browser.refresh()\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n return True\n return False\n\n\ndef download_docs(browser, save_dir='./', click_next_page=False):\n if click_next_page:\n next_page = browser.find_elements(By.XPATH,\n '//*[@id=\"pageNumber\"]/a[contains(text(), \"下一页\")]')\n next_page[0].click()\n if is_finished(browser):\n logger.info('Finished downloading documents in this page.')\n return\n link_xpath = '//*[@class=\"dataItem\"]'\n keywords_elems = browser.find_elements(By.XPATH,\n '//*[@class=\"contentCondtion\"]')\n subfolder = '-'.join([el.text for el in keywords_elems])\n elems = browser.find_elements(By.XPATH, link_xpath)\n for el in elems:\n save_doc(browser, el, os.path.join(save_dir, subfolder))\n time.sleep(1)\n download_docs(browser, save_dir, click_next_page=True)\n\n\n@retry(times=5, delay=5, allowed_exceptions=IndexError)\ndef save_doc(browser, doc_elem, save_dir):\n doc_key = doc_elem.get_attribute('key')\n doc_title = doc_elem.get_attribute('title')\n logger.info('Found document %s.' % doc_title)\n unzipped_id = browser.execute_script('return unzip(\"%s\")' % doc_key)\n doc_id = browser.execute_script('return com.str.Decrypt(\"%s\")' %\n unzipped_id)\n doc_link = DOC_LINK_BASE % doc_id\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'\n }\n p = re.compile('(var jsonHtmlData = \")(.+)(\\\\\"}\";)')\n resp = requests.get(doc_link, headers=headers)\n resp_text = resp.text\n resp_obj = p.findall(resp_text)[0][1].replace('\\\\', '') + '\"}'\n resp_obj = json.loads(resp_obj)\n os.makedirs(save_dir, exist_ok=True)\n with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:\n f.write(resp_obj['Html'])\n logger.info('Downloaded %s.' % resp_obj['Title'])\n",
"step-4": "import time\nimport itertools\nimport re\nimport requests\nimport json\nimport os\nfrom random import randint\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import ElementNotInteractableException\nfrom .exceptions import UnsupportedPlatformException\nfrom .config import get_logger, DOC_LINK_BASE\nfrom .utils import retry\nlogger = get_logger(__name__)\n\n\ndef sleep(min_seconds=1, max_seconds=10):\n \"\"\"Allow a browser instance to wait for a few seconds before do something\"\"\"\n time.sleep(randint(min_seconds, max_seconds))\n\n\ndef click(elem):\n try:\n elem.click()\n except ElementNotInteractableException:\n pass\n\n\ndef open_website(url):\n \"\"\"\n Open website of target url\n \"\"\"\n browser = webdriver.Firefox()\n browser.get(url)\n return browser\n\n\ndef is_finished(browser):\n finish_text = '无符合条件的数据...'\n sleep_secs = 15\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n logger.info('Try refresh to reload content')\n browser.refresh()\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n return True\n return False\n\n\ndef download_docs(browser, save_dir='./', click_next_page=False):\n if click_next_page:\n next_page = browser.find_elements(By.XPATH,\n '//*[@id=\"pageNumber\"]/a[contains(text(), \"下一页\")]')\n next_page[0].click()\n if is_finished(browser):\n logger.info('Finished downloading documents in this page.')\n return\n link_xpath = '//*[@class=\"dataItem\"]'\n keywords_elems = browser.find_elements(By.XPATH,\n '//*[@class=\"contentCondtion\"]')\n subfolder = '-'.join([el.text for el in keywords_elems])\n elems = browser.find_elements(By.XPATH, link_xpath)\n for el in elems:\n save_doc(browser, el, os.path.join(save_dir, subfolder))\n time.sleep(1)\n download_docs(browser, save_dir, click_next_page=True)\n\n\n@retry(times=5, delay=5, allowed_exceptions=IndexError)\ndef save_doc(browser, doc_elem, save_dir):\n doc_key = doc_elem.get_attribute('key')\n doc_title = doc_elem.get_attribute('title')\n logger.info('Found document %s.' % doc_title)\n unzipped_id = browser.execute_script('return unzip(\"%s\")' % doc_key)\n doc_id = browser.execute_script('return com.str.Decrypt(\"%s\")' %\n unzipped_id)\n doc_link = DOC_LINK_BASE % doc_id\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'\n }\n p = re.compile('(var jsonHtmlData = \")(.+)(\\\\\"}\";)')\n resp = requests.get(doc_link, headers=headers)\n resp_text = resp.text\n resp_obj = p.findall(resp_text)[0][1].replace('\\\\', '') + '\"}'\n resp_obj = json.loads(resp_obj)\n os.makedirs(save_dir, exist_ok=True)\n with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:\n f.write(resp_obj['Html'])\n logger.info('Downloaded %s.' % resp_obj['Title'])\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n# File: wenshu/actions.py\n# Author: Carolusian <https://github.com/carolusian>\n# Date: 22.09.2018\n# Last Modified Date: 22.09.2018\n#\n# Copyright 2018 Carolusian\n\nimport time\nimport itertools\nimport re\nimport requests\nimport json\nimport os\nfrom random import randint\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import ElementNotInteractableException\nfrom .exceptions import UnsupportedPlatformException\n\nfrom .config import get_logger, DOC_LINK_BASE\nfrom .utils import retry\n\n\nlogger = get_logger(__name__)\n\n\ndef sleep(min_seconds=1, max_seconds=10):\n \"\"\"Allow a browser instance to wait for a few seconds before do something\"\"\"\n time.sleep(randint(min_seconds, max_seconds))\n\n\ndef click(elem):\n try:\n elem.click()\n except ElementNotInteractableException:\n pass\n\n\ndef open_website(url):\n \"\"\"\n Open website of target url\n \"\"\"\n browser = webdriver.Firefox()\n browser.get(url)\n return browser\n\n\ndef is_finished(browser):\n finish_text = '无符合条件的数据...'\n sleep_secs = 15\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n # Refresh if no result found\n if finish_text in result_list.text:\n logger.info('Try refresh to reload content')\n browser.refresh()\n time.sleep(sleep_secs)\n\n # If still not result found, finish downloading\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n return True\n return False\n\n\ndef download_docs(browser, save_dir='./', click_next_page=False): \n if click_next_page:\n next_page = browser.find_elements(By.XPATH, '//*[@id=\"pageNumber\"]/a[contains(text(), \"下一页\")]')\n next_page[0].click()\n if is_finished(browser):\n logger.info('Finished downloading documents in this page.')\n return\n\n link_xpath = '//*[@class=\"dataItem\"]'\n keywords_elems = browser.find_elements(By.XPATH, '//*[@class=\"contentCondtion\"]')\n subfolder = '-'.join([el.text for el in keywords_elems])\n elems = browser.find_elements(By.XPATH, link_xpath)\n for el in elems:\n save_doc(browser, el, os.path.join(save_dir, subfolder))\n time.sleep(1)\n\n # Goto next page after this page is download\n download_docs(browser, save_dir, click_next_page=True)\n\n\n@retry(times=5, delay=5, allowed_exceptions=IndexError)\ndef save_doc(browser, doc_elem, save_dir):\n doc_key = doc_elem.get_attribute('key')\n doc_title = doc_elem.get_attribute('title')\n logger.info('Found document %s.' % doc_title)\n\n unzipped_id = browser.execute_script('return unzip(\"%s\")' % doc_key)\n doc_id = browser.execute_script('return com.str.Decrypt(\"%s\")' % unzipped_id)\n doc_link = DOC_LINK_BASE % doc_id\n\n headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}\n p = re.compile('(var jsonHtmlData = \")(.+)(\\\\\"}\";)')\n \n resp = requests.get(doc_link, headers=headers)\n resp_text = resp.text\n\n resp_obj = p.findall(resp_text)[0][1].replace('\\\\', '') + '\"}'\n resp_obj = json.loads(resp_obj)\n\n os.makedirs(save_dir, exist_ok=True)\n with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:\n f.write(resp_obj['Html'])\n logger.info('Downloaded %s.' % resp_obj['Title'])\n\n\n \n\n\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
ANCHO = 600
ALTO = 800
|
normal
|
{
"blob_id": "71ca67948100fb7ad388934740cead1ebe4a2b52",
"index": 8549,
"step-1": "<mask token>\n",
"step-2": "ANCHO = 600\nALTO = 800\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from schemasheets.schemasheet_datamodel import SchemaSheet
RECORD = "Record"
FIELD = "Field"
METATYPE = "MetaType"
INFO = "Info"
CV = "CV"
PV = "PV"
SDO_MAPPINGS = "schema.org"
WD_MAPPINGS = "wikidata"
DATATYPE = "Datatype"
CASES = [
(1,
[
{
RECORD: "> class",
INFO: " description",
SDO_MAPPINGS: "exact_mappings: {curie_prefix: sdo}",
WD_MAPPINGS: "exact_mappings"
},
{
RECORD: ">",
WD_MAPPINGS: "curie_prefix: wd"
},
]
),
(2,
[
{RECORD: "> class", FIELD: " slot", INFO: " description"},
]
),
(3,
[
{METATYPE: "> metatype", INFO: " description"},
]
),
(4,
[
{CV: "> enum", PV: "permissible_value", INFO: " description"},
]
),
(5,
[
{DATATYPE: "> type", INFO: " description"},
]
),
# unnecessary/incompatible with the latest meta-model
# (6,
# [
# {DATATYPE: "> metaslot.type", INFO: " description"},
# ]
# ),
]
def test_parse_header():
print()
for case_id, case in CASES:
ss = SchemaSheet.from_dictreader(case)
tc = ss.table_config
info_cc = tc.columns[INFO]
assert info_cc.name == INFO
assert info_cc.maps_to == "description"
assert info_cc.metaslot is not None
assert info_cc.metaslot.name == "description"
if case_id == 1 or case_id == 2:
assert tc.metatype_column is None
record_cc = tc.columns[RECORD]
assert record_cc.name == RECORD
assert record_cc.maps_to == "class"
assert record_cc.metaslot is None
if case_id == 2:
field_cc = tc.columns[FIELD]
assert field_cc.name == FIELD
assert field_cc.maps_to == "slot"
assert field_cc.metaslot is None
if case_id == 1:
sdo_cc = tc.columns[SDO_MAPPINGS]
assert sdo_cc.name == SDO_MAPPINGS
assert sdo_cc.maps_to == "exact_mappings"
assert sdo_cc.metaslot is not None
assert sdo_cc.metaslot.name == "exact mappings" or\
sdo_cc.metaslot.name == "exact_mappings"
assert sdo_cc.settings.curie_prefix == "sdo"
wd_cc = tc.columns[WD_MAPPINGS]
assert wd_cc.name == WD_MAPPINGS
assert wd_cc.maps_to == "exact_mappings"
assert wd_cc.metaslot is not None
assert wd_cc.metaslot.name == "exact mappings" or \
wd_cc.metaslot.name == "exact_mappings"
assert wd_cc.settings.curie_prefix == "wd"
if case_id == 3:
assert tc.metatype_column == METATYPE
record_cc = tc.columns[METATYPE]
assert record_cc.name == METATYPE
assert record_cc.maps_to == "metatype"
assert record_cc.metaslot is None
if case_id == 4:
cv_cc = tc.columns[CV]
assert cv_cc.name == CV
assert cv_cc.maps_to == "enum"
assert cv_cc.metaslot is None
pv_cc = tc.columns[PV]
assert pv_cc.name == PV
assert pv_cc.maps_to == "permissible_value"
assert pv_cc.metaslot is None
if case_id == 5:
dt_cc = tc.columns[DATATYPE]
#print(dt_cc)
assert dt_cc.name == DATATYPE
assert dt_cc.maps_to == "type"
assert dt_cc.metaslot is None
if case_id == 6:
# See https://github.com/linkml/schemasheets/issues/75
dt_cc = tc.columns[DATATYPE]
assert dt_cc.name == DATATYPE
assert dt_cc.maps_to == "type"
assert dt_cc.metaslot is not None
assert dt_cc.metaslot.name == "type"
|
normal
|
{
"blob_id": "25dc0395da1f1ac2ccd990151c3e5b250802b402",
"index": 2749,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_parse_header():\n print()\n for case_id, case in CASES:\n ss = SchemaSheet.from_dictreader(case)\n tc = ss.table_config\n info_cc = tc.columns[INFO]\n assert info_cc.name == INFO\n assert info_cc.maps_to == 'description'\n assert info_cc.metaslot is not None\n assert info_cc.metaslot.name == 'description'\n if case_id == 1 or case_id == 2:\n assert tc.metatype_column is None\n record_cc = tc.columns[RECORD]\n assert record_cc.name == RECORD\n assert record_cc.maps_to == 'class'\n assert record_cc.metaslot is None\n if case_id == 2:\n field_cc = tc.columns[FIELD]\n assert field_cc.name == FIELD\n assert field_cc.maps_to == 'slot'\n assert field_cc.metaslot is None\n if case_id == 1:\n sdo_cc = tc.columns[SDO_MAPPINGS]\n assert sdo_cc.name == SDO_MAPPINGS\n assert sdo_cc.maps_to == 'exact_mappings'\n assert sdo_cc.metaslot is not None\n assert sdo_cc.metaslot.name == 'exact mappings' or sdo_cc.metaslot.name == 'exact_mappings'\n assert sdo_cc.settings.curie_prefix == 'sdo'\n wd_cc = tc.columns[WD_MAPPINGS]\n assert wd_cc.name == WD_MAPPINGS\n assert wd_cc.maps_to == 'exact_mappings'\n assert wd_cc.metaslot is not None\n assert wd_cc.metaslot.name == 'exact mappings' or wd_cc.metaslot.name == 'exact_mappings'\n assert wd_cc.settings.curie_prefix == 'wd'\n if case_id == 3:\n assert tc.metatype_column == METATYPE\n record_cc = tc.columns[METATYPE]\n assert record_cc.name == METATYPE\n assert record_cc.maps_to == 'metatype'\n assert record_cc.metaslot is None\n if case_id == 4:\n cv_cc = tc.columns[CV]\n assert cv_cc.name == CV\n assert cv_cc.maps_to == 'enum'\n assert cv_cc.metaslot is None\n pv_cc = tc.columns[PV]\n assert pv_cc.name == PV\n assert pv_cc.maps_to == 'permissible_value'\n assert pv_cc.metaslot is None\n if case_id == 5:\n dt_cc = tc.columns[DATATYPE]\n assert dt_cc.name == DATATYPE\n assert dt_cc.maps_to == 'type'\n assert dt_cc.metaslot is None\n if case_id == 6:\n dt_cc = tc.columns[DATATYPE]\n assert dt_cc.name == DATATYPE\n assert dt_cc.maps_to == 'type'\n assert dt_cc.metaslot is not None\n assert dt_cc.metaslot.name == 'type'\n",
"step-3": "<mask token>\nRECORD = 'Record'\nFIELD = 'Field'\nMETATYPE = 'MetaType'\nINFO = 'Info'\nCV = 'CV'\nPV = 'PV'\nSDO_MAPPINGS = 'schema.org'\nWD_MAPPINGS = 'wikidata'\nDATATYPE = 'Datatype'\nCASES = [(1, [{RECORD: '> class', INFO: ' description', SDO_MAPPINGS:\n 'exact_mappings: {curie_prefix: sdo}', WD_MAPPINGS: 'exact_mappings'},\n {RECORD: '>', WD_MAPPINGS: 'curie_prefix: wd'}]), (2, [{RECORD:\n '> class', FIELD: ' slot', INFO: ' description'}]), (3, [{METATYPE:\n '> metatype', INFO: ' description'}]), (4, [{CV: '> enum', PV:\n 'permissible_value', INFO: ' description'}]), (5, [{DATATYPE: '> type',\n INFO: ' description'}])]\n\n\ndef test_parse_header():\n print()\n for case_id, case in CASES:\n ss = SchemaSheet.from_dictreader(case)\n tc = ss.table_config\n info_cc = tc.columns[INFO]\n assert info_cc.name == INFO\n assert info_cc.maps_to == 'description'\n assert info_cc.metaslot is not None\n assert info_cc.metaslot.name == 'description'\n if case_id == 1 or case_id == 2:\n assert tc.metatype_column is None\n record_cc = tc.columns[RECORD]\n assert record_cc.name == RECORD\n assert record_cc.maps_to == 'class'\n assert record_cc.metaslot is None\n if case_id == 2:\n field_cc = tc.columns[FIELD]\n assert field_cc.name == FIELD\n assert field_cc.maps_to == 'slot'\n assert field_cc.metaslot is None\n if case_id == 1:\n sdo_cc = tc.columns[SDO_MAPPINGS]\n assert sdo_cc.name == SDO_MAPPINGS\n assert sdo_cc.maps_to == 'exact_mappings'\n assert sdo_cc.metaslot is not None\n assert sdo_cc.metaslot.name == 'exact mappings' or sdo_cc.metaslot.name == 'exact_mappings'\n assert sdo_cc.settings.curie_prefix == 'sdo'\n wd_cc = tc.columns[WD_MAPPINGS]\n assert wd_cc.name == WD_MAPPINGS\n assert wd_cc.maps_to == 'exact_mappings'\n assert wd_cc.metaslot is not None\n assert wd_cc.metaslot.name == 'exact mappings' or wd_cc.metaslot.name == 'exact_mappings'\n assert wd_cc.settings.curie_prefix == 'wd'\n if case_id == 3:\n assert tc.metatype_column == METATYPE\n record_cc = tc.columns[METATYPE]\n assert record_cc.name == METATYPE\n assert record_cc.maps_to == 'metatype'\n assert record_cc.metaslot is None\n if case_id == 4:\n cv_cc = tc.columns[CV]\n assert cv_cc.name == CV\n assert cv_cc.maps_to == 'enum'\n assert cv_cc.metaslot is None\n pv_cc = tc.columns[PV]\n assert pv_cc.name == PV\n assert pv_cc.maps_to == 'permissible_value'\n assert pv_cc.metaslot is None\n if case_id == 5:\n dt_cc = tc.columns[DATATYPE]\n assert dt_cc.name == DATATYPE\n assert dt_cc.maps_to == 'type'\n assert dt_cc.metaslot is None\n if case_id == 6:\n dt_cc = tc.columns[DATATYPE]\n assert dt_cc.name == DATATYPE\n assert dt_cc.maps_to == 'type'\n assert dt_cc.metaslot is not None\n assert dt_cc.metaslot.name == 'type'\n",
"step-4": "from schemasheets.schemasheet_datamodel import SchemaSheet\nRECORD = 'Record'\nFIELD = 'Field'\nMETATYPE = 'MetaType'\nINFO = 'Info'\nCV = 'CV'\nPV = 'PV'\nSDO_MAPPINGS = 'schema.org'\nWD_MAPPINGS = 'wikidata'\nDATATYPE = 'Datatype'\nCASES = [(1, [{RECORD: '> class', INFO: ' description', SDO_MAPPINGS:\n 'exact_mappings: {curie_prefix: sdo}', WD_MAPPINGS: 'exact_mappings'},\n {RECORD: '>', WD_MAPPINGS: 'curie_prefix: wd'}]), (2, [{RECORD:\n '> class', FIELD: ' slot', INFO: ' description'}]), (3, [{METATYPE:\n '> metatype', INFO: ' description'}]), (4, [{CV: '> enum', PV:\n 'permissible_value', INFO: ' description'}]), (5, [{DATATYPE: '> type',\n INFO: ' description'}])]\n\n\ndef test_parse_header():\n print()\n for case_id, case in CASES:\n ss = SchemaSheet.from_dictreader(case)\n tc = ss.table_config\n info_cc = tc.columns[INFO]\n assert info_cc.name == INFO\n assert info_cc.maps_to == 'description'\n assert info_cc.metaslot is not None\n assert info_cc.metaslot.name == 'description'\n if case_id == 1 or case_id == 2:\n assert tc.metatype_column is None\n record_cc = tc.columns[RECORD]\n assert record_cc.name == RECORD\n assert record_cc.maps_to == 'class'\n assert record_cc.metaslot is None\n if case_id == 2:\n field_cc = tc.columns[FIELD]\n assert field_cc.name == FIELD\n assert field_cc.maps_to == 'slot'\n assert field_cc.metaslot is None\n if case_id == 1:\n sdo_cc = tc.columns[SDO_MAPPINGS]\n assert sdo_cc.name == SDO_MAPPINGS\n assert sdo_cc.maps_to == 'exact_mappings'\n assert sdo_cc.metaslot is not None\n assert sdo_cc.metaslot.name == 'exact mappings' or sdo_cc.metaslot.name == 'exact_mappings'\n assert sdo_cc.settings.curie_prefix == 'sdo'\n wd_cc = tc.columns[WD_MAPPINGS]\n assert wd_cc.name == WD_MAPPINGS\n assert wd_cc.maps_to == 'exact_mappings'\n assert wd_cc.metaslot is not None\n assert wd_cc.metaslot.name == 'exact mappings' or wd_cc.metaslot.name == 'exact_mappings'\n assert wd_cc.settings.curie_prefix == 'wd'\n if case_id == 3:\n assert tc.metatype_column == METATYPE\n record_cc = tc.columns[METATYPE]\n assert record_cc.name == METATYPE\n assert record_cc.maps_to == 'metatype'\n assert record_cc.metaslot is None\n if case_id == 4:\n cv_cc = tc.columns[CV]\n assert cv_cc.name == CV\n assert cv_cc.maps_to == 'enum'\n assert cv_cc.metaslot is None\n pv_cc = tc.columns[PV]\n assert pv_cc.name == PV\n assert pv_cc.maps_to == 'permissible_value'\n assert pv_cc.metaslot is None\n if case_id == 5:\n dt_cc = tc.columns[DATATYPE]\n assert dt_cc.name == DATATYPE\n assert dt_cc.maps_to == 'type'\n assert dt_cc.metaslot is None\n if case_id == 6:\n dt_cc = tc.columns[DATATYPE]\n assert dt_cc.name == DATATYPE\n assert dt_cc.maps_to == 'type'\n assert dt_cc.metaslot is not None\n assert dt_cc.metaslot.name == 'type'\n",
"step-5": "from schemasheets.schemasheet_datamodel import SchemaSheet\n\nRECORD = \"Record\"\nFIELD = \"Field\"\nMETATYPE = \"MetaType\"\nINFO = \"Info\"\nCV = \"CV\"\nPV = \"PV\"\nSDO_MAPPINGS = \"schema.org\"\nWD_MAPPINGS = \"wikidata\"\nDATATYPE = \"Datatype\"\n\nCASES = [\n (1,\n [\n {\n RECORD: \"> class\",\n INFO: \" description\",\n SDO_MAPPINGS: \"exact_mappings: {curie_prefix: sdo}\",\n WD_MAPPINGS: \"exact_mappings\"\n },\n {\n RECORD: \">\",\n WD_MAPPINGS: \"curie_prefix: wd\"\n },\n ]\n ),\n (2,\n [\n {RECORD: \"> class\", FIELD: \" slot\", INFO: \" description\"},\n ]\n ),\n (3,\n [\n {METATYPE: \"> metatype\", INFO: \" description\"},\n ]\n ),\n (4,\n [\n {CV: \"> enum\", PV: \"permissible_value\", INFO: \" description\"},\n ]\n ),\n (5,\n [\n {DATATYPE: \"> type\", INFO: \" description\"},\n ]\n ),\n # unnecessary/incompatible with the latest meta-model\n # (6,\n # [\n # {DATATYPE: \"> metaslot.type\", INFO: \" description\"},\n # ]\n # ),\n]\n\ndef test_parse_header():\n print()\n for case_id, case in CASES:\n ss = SchemaSheet.from_dictreader(case)\n tc = ss.table_config\n info_cc = tc.columns[INFO]\n assert info_cc.name == INFO\n assert info_cc.maps_to == \"description\"\n assert info_cc.metaslot is not None\n assert info_cc.metaslot.name == \"description\"\n if case_id == 1 or case_id == 2:\n assert tc.metatype_column is None\n record_cc = tc.columns[RECORD]\n assert record_cc.name == RECORD\n assert record_cc.maps_to == \"class\"\n assert record_cc.metaslot is None\n if case_id == 2:\n field_cc = tc.columns[FIELD]\n assert field_cc.name == FIELD\n assert field_cc.maps_to == \"slot\"\n assert field_cc.metaslot is None\n if case_id == 1:\n sdo_cc = tc.columns[SDO_MAPPINGS]\n assert sdo_cc.name == SDO_MAPPINGS\n assert sdo_cc.maps_to == \"exact_mappings\"\n assert sdo_cc.metaslot is not None\n assert sdo_cc.metaslot.name == \"exact mappings\" or\\\n sdo_cc.metaslot.name == \"exact_mappings\"\n assert sdo_cc.settings.curie_prefix == \"sdo\"\n wd_cc = tc.columns[WD_MAPPINGS]\n assert wd_cc.name == WD_MAPPINGS\n assert wd_cc.maps_to == \"exact_mappings\"\n assert wd_cc.metaslot is not None\n assert wd_cc.metaslot.name == \"exact mappings\" or \\\n wd_cc.metaslot.name == \"exact_mappings\"\n assert wd_cc.settings.curie_prefix == \"wd\"\n if case_id == 3:\n assert tc.metatype_column == METATYPE\n record_cc = tc.columns[METATYPE]\n assert record_cc.name == METATYPE\n assert record_cc.maps_to == \"metatype\"\n assert record_cc.metaslot is None\n if case_id == 4:\n cv_cc = tc.columns[CV]\n assert cv_cc.name == CV\n assert cv_cc.maps_to == \"enum\"\n assert cv_cc.metaslot is None\n pv_cc = tc.columns[PV]\n assert pv_cc.name == PV\n assert pv_cc.maps_to == \"permissible_value\"\n assert pv_cc.metaslot is None\n if case_id == 5:\n dt_cc = tc.columns[DATATYPE]\n #print(dt_cc)\n assert dt_cc.name == DATATYPE\n assert dt_cc.maps_to == \"type\"\n assert dt_cc.metaslot is None\n if case_id == 6:\n # See https://github.com/linkml/schemasheets/issues/75\n dt_cc = tc.columns[DATATYPE]\n assert dt_cc.name == DATATYPE\n assert dt_cc.maps_to == \"type\"\n assert dt_cc.metaslot is not None\n assert dt_cc.metaslot.name == \"type\"\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def find_boyer_moore2(T, P):
""" return lowest index of T at which the substring P begins or -1"""
n, m = len(T), len(P)
if m == 0:
return 0
last = {}
for k in range(m):
last[P[k]] = k
i = m - 1
k = m - 1
while i < n:
if T[i] == P[k]:
if k == 0:
return i
else:
i -= 1
k -= 1
else:
j = last.get(T[i], -1)
i += m - min(k, j + 1)
k = m - 1
return -1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def find_boyer_moore(T, P):
""" return lowest index of T at which the substring P begins or -1"""
n, m = len(T), len(P)
if m == 0:
return 0
last = {}
for k in range(m):
last[P[k]] = k
i = m - 1
k = m - 1
while i < n:
if T[i] == P[k]:
""" INCORRECT PART """
i -= 1
k -= 1
if k == 0:
return i
else:
j = last.get(T[i], -1)
i += m - min(k, j + 1)
k = m - 1
return -1
def find_boyer_moore2(T, P):
""" return lowest index of T at which the substring P begins or -1"""
n, m = len(T), len(P)
if m == 0:
return 0
last = {}
for k in range(m):
last[P[k]] = k
i = m - 1
k = m - 1
while i < n:
if T[i] == P[k]:
if k == 0:
return i
else:
i -= 1
k -= 1
else:
j = last.get(T[i], -1)
i += m - min(k, j + 1)
k = m - 1
return -1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def find_boyer_moore(T, P):
""" return lowest index of T at which the substring P begins or -1"""
n, m = len(T), len(P)
if m == 0:
return 0
last = {}
for k in range(m):
last[P[k]] = k
i = m - 1
k = m - 1
while i < n:
if T[i] == P[k]:
""" INCORRECT PART """
i -= 1
k -= 1
if k == 0:
return i
else:
j = last.get(T[i], -1)
i += m - min(k, j + 1)
k = m - 1
return -1
def find_boyer_moore2(T, P):
""" return lowest index of T at which the substring P begins or -1"""
n, m = len(T), len(P)
if m == 0:
return 0
last = {}
for k in range(m):
last[P[k]] = k
i = m - 1
k = m - 1
while i < n:
if T[i] == P[k]:
if k == 0:
return i
else:
i -= 1
k -= 1
else:
j = last.get(T[i], -1)
i += m - min(k, j + 1)
k = m - 1
return -1
<|reserved_special_token_0|>
print(find_boyer_moore2(T, P))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def find_boyer_moore(T, P):
""" return lowest index of T at which the substring P begins or -1"""
n, m = len(T), len(P)
if m == 0:
return 0
last = {}
for k in range(m):
last[P[k]] = k
i = m - 1
k = m - 1
while i < n:
if T[i] == P[k]:
""" INCORRECT PART """
i -= 1
k -= 1
if k == 0:
return i
else:
j = last.get(T[i], -1)
i += m - min(k, j + 1)
k = m - 1
return -1
def find_boyer_moore2(T, P):
""" return lowest index of T at which the substring P begins or -1"""
n, m = len(T), len(P)
if m == 0:
return 0
last = {}
for k in range(m):
last[P[k]] = k
i = m - 1
k = m - 1
while i < n:
if T[i] == P[k]:
if k == 0:
return i
else:
i -= 1
k -= 1
else:
j = last.get(T[i], -1)
i += m - min(k, j + 1)
k = m - 1
return -1
T = 'ddcbacab'
P = 'abacab'
print(find_boyer_moore2(T, P))
<|reserved_special_token_1|>
"""
Pattern matching problem
Boyer Moore algorithm
First is my attempt, below is the code provided in the book
Idea:
Optimize brute force approach using 2 heuristics:
- Looking-Glass: start searches from last character of the
pattern and work backwards
- Character-Jump: During testing of a pattern P, a mismatch
in T[i] = c with corresponding pattern P[k] is handled:
a) if C is not contained in P, shift P completely past i.
b) if c is contained in P shift P until an occurrence of c
gets aligned with T[i]
"""
def find_boyer_moore(T, P):
""" return lowest index of T at which the substring P begins or -1"""
n, m = len(T), len(P)
if m == 0: return 0
last = {} # Using hash table for fast access
for k in range(m):
last[P[k]] = k
i = m - 1 # i index at T, k index at P
k = m - 1 # j index of last occurrence of T[i] in P
while i < n:
if T[i] == P[k]: # if chars are equal
""" INCORRECT PART """
i -= 1 # normal iteration
k -= 1
if k == 0:
                return i # check if Pattern is complete
else:
# if j < k (remember k index at P)
# shift i += m - (j+1)
# if j > k
# shift i += m - k
j = last.get(T[i], -1) # -1 if item not there
i += m - (min(k, j+1))
k = m - 1
return -1
def find_boyer_moore2(T, P):
""" return lowest index of T at which the substring P begins or -1"""
n, m = len(T), len(P)
if m == 0: return 0
last = {} # Using hash table for fast access
for k in range(m):
last[P[k]] = k
i = m - 1 # i index at T, k index at P
k = m - 1 # j index of last occurrence of T[i] in P
while i < n:
if T[i] == P[k]: # if chars are equal
if k == 0:
                return i # check if Pattern is complete
else:
i -= 1 # normal iteration
k -= 1
else:
j = last.get(T[i], -1) # -1 if item not there
i += m - (min(k, j+1))
k = m - 1
return -1
# T = "abacaabadcabacabaabb"
T = "ddcbacab"
P = "abacab"
print(find_boyer_moore2(T, P))
|
flexible
|
{
"blob_id": "c418b9b6903ebdad204a3a55f2384a94a3be0d09",
"index": 5561,
"step-1": "<mask token>\n\n\ndef find_boyer_moore2(T, P):\n \"\"\" return lowest index of T at which the substring P begins or -1\"\"\"\n n, m = len(T), len(P)\n if m == 0:\n return 0\n last = {}\n for k in range(m):\n last[P[k]] = k\n i = m - 1\n k = m - 1\n while i < n:\n if T[i] == P[k]:\n if k == 0:\n return i\n else:\n i -= 1\n k -= 1\n else:\n j = last.get(T[i], -1)\n i += m - min(k, j + 1)\n k = m - 1\n return -1\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef find_boyer_moore(T, P):\n \"\"\" return lowest index of T at which the substring P begins or -1\"\"\"\n n, m = len(T), len(P)\n if m == 0:\n return 0\n last = {}\n for k in range(m):\n last[P[k]] = k\n i = m - 1\n k = m - 1\n while i < n:\n if T[i] == P[k]:\n \"\"\" INCORRECT PART \"\"\"\n i -= 1\n k -= 1\n if k == 0:\n return i\n else:\n j = last.get(T[i], -1)\n i += m - min(k, j + 1)\n k = m - 1\n return -1\n\n\ndef find_boyer_moore2(T, P):\n \"\"\" return lowest index of T at which the substring P begins or -1\"\"\"\n n, m = len(T), len(P)\n if m == 0:\n return 0\n last = {}\n for k in range(m):\n last[P[k]] = k\n i = m - 1\n k = m - 1\n while i < n:\n if T[i] == P[k]:\n if k == 0:\n return i\n else:\n i -= 1\n k -= 1\n else:\n j = last.get(T[i], -1)\n i += m - min(k, j + 1)\n k = m - 1\n return -1\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef find_boyer_moore(T, P):\n \"\"\" return lowest index of T at which the substring P begins or -1\"\"\"\n n, m = len(T), len(P)\n if m == 0:\n return 0\n last = {}\n for k in range(m):\n last[P[k]] = k\n i = m - 1\n k = m - 1\n while i < n:\n if T[i] == P[k]:\n \"\"\" INCORRECT PART \"\"\"\n i -= 1\n k -= 1\n if k == 0:\n return i\n else:\n j = last.get(T[i], -1)\n i += m - min(k, j + 1)\n k = m - 1\n return -1\n\n\ndef find_boyer_moore2(T, P):\n \"\"\" return lowest index of T at which the substring P begins or -1\"\"\"\n n, m = len(T), len(P)\n if m == 0:\n return 0\n last = {}\n for k in range(m):\n last[P[k]] = k\n i = m - 1\n k = m - 1\n while i < n:\n if T[i] == P[k]:\n if k == 0:\n return i\n else:\n i -= 1\n k -= 1\n else:\n j = last.get(T[i], -1)\n i += m - min(k, j + 1)\n k = m - 1\n return -1\n\n\n<mask token>\nprint(find_boyer_moore2(T, P))\n",
"step-4": "<mask token>\n\n\ndef find_boyer_moore(T, P):\n \"\"\" return lowest index of T at which the substring P begins or -1\"\"\"\n n, m = len(T), len(P)\n if m == 0:\n return 0\n last = {}\n for k in range(m):\n last[P[k]] = k\n i = m - 1\n k = m - 1\n while i < n:\n if T[i] == P[k]:\n \"\"\" INCORRECT PART \"\"\"\n i -= 1\n k -= 1\n if k == 0:\n return i\n else:\n j = last.get(T[i], -1)\n i += m - min(k, j + 1)\n k = m - 1\n return -1\n\n\ndef find_boyer_moore2(T, P):\n \"\"\" return lowest index of T at which the substring P begins or -1\"\"\"\n n, m = len(T), len(P)\n if m == 0:\n return 0\n last = {}\n for k in range(m):\n last[P[k]] = k\n i = m - 1\n k = m - 1\n while i < n:\n if T[i] == P[k]:\n if k == 0:\n return i\n else:\n i -= 1\n k -= 1\n else:\n j = last.get(T[i], -1)\n i += m - min(k, j + 1)\n k = m - 1\n return -1\n\n\nT = 'ddcbacab'\nP = 'abacab'\nprint(find_boyer_moore2(T, P))\n",
"step-5": "\"\"\"\nPattern matching problem\nBoyer Moore algorithm\n\nFirst is my attempt, below is the code provided in the book\nIdea:\nOptimize brute force approach using 2 heuristics:\n- Looking-Glass: start searches from last character of the\npattern and work backwards\n- Character-Jump: During testing of a pattern P, a mismatch\nin T[i] = c with corresponding pattern P[k] is handled:\na) if C is not contained in P, shift P completely past i.\nb) if c is contained in P shift P until an occurrence of c\ngets aligned with T[i]\n\n\"\"\"\n\n\ndef find_boyer_moore(T, P):\n \"\"\" return lowest index of T at which the substring P begins or -1\"\"\"\n n, m = len(T), len(P)\n if m == 0: return 0\n last = {} # Using hash table for fast access\n for k in range(m):\n last[P[k]] = k\n i = m - 1 # i index at T, k index at P\n k = m - 1 # j index of last occurrence of T[i] in P\n while i < n:\n if T[i] == P[k]: # if chars are equal\n \"\"\" INCORRECT PART \"\"\"\n i -= 1 # normal iteration\n k -= 1\n if k == 0:\n return i # check if Patter is complete\n else:\n # if j < k (remember k index at P)\n # shift i += m - (j+1)\n # if j > k\n # shift i += m - k\n j = last.get(T[i], -1) # -1 if item not there\n i += m - (min(k, j+1))\n k = m - 1\n return -1\n\n\ndef find_boyer_moore2(T, P):\n \"\"\" return lowest index of T at which the substring P begins or -1\"\"\"\n n, m = len(T), len(P)\n if m == 0: return 0\n last = {} # Using hash table for fast access\n for k in range(m):\n last[P[k]] = k\n i = m - 1 # i index at T, k index at P\n k = m - 1 # j index of last occurrence of T[i] in P\n while i < n:\n if T[i] == P[k]: # if chars are equal\n if k == 0:\n return i # check if Patter is complete\n else:\n i -= 1 # normal iteration\n k -= 1\n else:\n j = last.get(T[i], -1) # -1 if item not there\n i += m - (min(k, j+1))\n k = m - 1\n return -1\n\n# T = \"abacaabadcabacabaabb\"\nT = \"ddcbacab\"\nP = \"abacab\"\nprint(find_boyer_moore2(T, P))",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""
函数对象有一个__defaults__属性,是保存定位参数和关键字参数默认值的元组,
仅限关键字参数默认值在__kwdefaults__属性中,参数的名称在__code__属性中(__code__本身是对象引用,有很多属性)
使用inspect模块提取函数签名更加方便,很多框架和IDE都是以此来验证代码的
"""
def tag(name, *content, cls=None, **attrs):
    """ Generate one or more HTML tags """
    if cls is not None:
        attrs['class'] = cls
    if attrs:
        attrs_str = ''.join(' %s="%s"' % (attr, value) for attr, value in attrs.items())
    else:
        attrs_str = ''
    if content:
        return '\n'.join('<%s%s>%s</%s>' % (name, attrs_str, c, name) for c in content)
    else:
        return '<%s%s />' % (name, attrs_str)
print(
tag.__defaults__,
tag.__code__,
tag.__code__.co_varnames,
tag.__code__.co_argcount,
sep = '\n'
)
print()
from inspect import signature
sig = signature(tag)
print(sig)
for name, param in sig.parameters.items():  # name and param.name are the same
    print(param.kind, ':', name, '=', param.default)
print()
# The signature function returns an inspect.Signature object; its parameters attribute is an ordered mapping
# (sig.parameters here) of inspect.Parameter objects, each carrying name, default, kind and annotation attributes
# An inspect.Signature object has a bind method that binds any number of arguments to the signature's formal parameters
my_tag = {
'name': 'img',
'title': 'Sunset',
'src': 'sunset.jpg',
'cls': 'framed'
}
bound_args = sig.bind(**my_tag)
print(bound_args)
for name, value in bound_args.arguments.items():  # arguments is an OrderedDict
    print(name, '=', value)
|
normal
|
{
"blob_id": "a9b895e4d0830320276359944ca6fdc475fd144e",
"index": 7923,
"step-1": "<mask token>\n\n\ndef tag(name, *content, cls=None, **attrs):\n \"\"\" 生成一个或多个HTML标签 \"\"\"\n if cls is not None:\n attrs['class'] = cls\n if attrs:\n attrs_str = ''.join(' %s=\"%s\"' % (attr, value) for attr, value in\n attrs.items())\n else:\n attrs_str = ''\n if content:\n return '\\n'.join('<%s%s>%s</%s>' % (name, attrs_str, c, name) for c in\n content)\n else:\n return '<%s%s />' % (name, attrs_str)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef tag(name, *content, cls=None, **attrs):\n \"\"\" 生成一个或多个HTML标签 \"\"\"\n if cls is not None:\n attrs['class'] = cls\n if attrs:\n attrs_str = ''.join(' %s=\"%s\"' % (attr, value) for attr, value in\n attrs.items())\n else:\n attrs_str = ''\n if content:\n return '\\n'.join('<%s%s>%s</%s>' % (name, attrs_str, c, name) for c in\n content)\n else:\n return '<%s%s />' % (name, attrs_str)\n\n\nprint(tag.__defaults__, tag.__code__, tag.__code__.co_varnames, tag.\n __code__.co_argcount, sep='\\n')\nprint()\n<mask token>\nprint(sig)\nfor name, param in sig.parameters.items():\n print(param.kind, ':', name, '=', param.default)\nprint()\n<mask token>\nprint(bound_args)\nfor name, value in bound_args.arguments.items():\n print(name, '=', value)\n",
"step-3": "<mask token>\n\n\ndef tag(name, *content, cls=None, **attrs):\n \"\"\" 生成一个或多个HTML标签 \"\"\"\n if cls is not None:\n attrs['class'] = cls\n if attrs:\n attrs_str = ''.join(' %s=\"%s\"' % (attr, value) for attr, value in\n attrs.items())\n else:\n attrs_str = ''\n if content:\n return '\\n'.join('<%s%s>%s</%s>' % (name, attrs_str, c, name) for c in\n content)\n else:\n return '<%s%s />' % (name, attrs_str)\n\n\nprint(tag.__defaults__, tag.__code__, tag.__code__.co_varnames, tag.\n __code__.co_argcount, sep='\\n')\nprint()\n<mask token>\nsig = signature(tag)\nprint(sig)\nfor name, param in sig.parameters.items():\n print(param.kind, ':', name, '=', param.default)\nprint()\nmy_tag = {'name': 'img', 'title': 'Sunset', 'src': 'sunset.jpg', 'cls':\n 'framed'}\nbound_args = sig.bind(**my_tag)\nprint(bound_args)\nfor name, value in bound_args.arguments.items():\n print(name, '=', value)\n",
"step-4": "<mask token>\n\n\ndef tag(name, *content, cls=None, **attrs):\n \"\"\" 生成一个或多个HTML标签 \"\"\"\n if cls is not None:\n attrs['class'] = cls\n if attrs:\n attrs_str = ''.join(' %s=\"%s\"' % (attr, value) for attr, value in\n attrs.items())\n else:\n attrs_str = ''\n if content:\n return '\\n'.join('<%s%s>%s</%s>' % (name, attrs_str, c, name) for c in\n content)\n else:\n return '<%s%s />' % (name, attrs_str)\n\n\nprint(tag.__defaults__, tag.__code__, tag.__code__.co_varnames, tag.\n __code__.co_argcount, sep='\\n')\nprint()\nfrom inspect import signature\nsig = signature(tag)\nprint(sig)\nfor name, param in sig.parameters.items():\n print(param.kind, ':', name, '=', param.default)\nprint()\nmy_tag = {'name': 'img', 'title': 'Sunset', 'src': 'sunset.jpg', 'cls':\n 'framed'}\nbound_args = sig.bind(**my_tag)\nprint(bound_args)\nfor name, value in bound_args.arguments.items():\n print(name, '=', value)\n",
"step-5": "\"\"\"\n函数对象有一个__defaults__属性,是保存定位参数和关键字参数默认值的元组,\n仅限关键字参数默认值在__kwdefaults__属性中,参数的名称在__code__属性中(__code__本身是对象引用,有很多属性)\n\n使用inspect模块提取函数签名更加方便,很多框架和IDE都是以此来验证代码的\n\"\"\"\n\n\ndef tag(name, *content, cls=None, **attrs):\n\t\"\"\" 生成一个或多个HTML标签 \"\"\"\n\tif cls is not None:\n\t\tattrs['class'] = cls\n\tif attrs:\n\t\tattrs_str = ''.join(' %s=\"%s\"' % (attr, value) for attr, value in attrs.items())\n\telse:\n\t\tattrs_str = '' \n\tif content:\n\t\treturn '\\n'.join('<%s%s>%s</%s>' % (name, attrs_str, c, name) for c in content)\n\telse:\n\t\treturn '<%s%s />' % (name, attrs_str)\n\n\nprint(\n\ttag.__defaults__,\n\ttag.__code__, \n\ttag.__code__.co_varnames, \n\ttag.__code__.co_argcount,\n\tsep = '\\n'\n\t)\nprint()\n\nfrom inspect import signature\nsig = signature(tag)\nprint(sig)\nfor name, param in sig.parameters.items(): # name 和 param.name是一样的\n\tprint(param.kind, ':', name, '=', param.default)\nprint()\n# signature函数返回的是inspect.Signature对象,它的parameters属性是一个有序映射,这里即sig.parameters,\n# 是inspect.Parameter对象,它有name、default、kind,还有annotation属性\n\n# inspect.Signature对象有一个bind方法,可以把任意个参数绑定到签名的形参上\nmy_tag = {\n\t\t'name': 'img',\n\t\t'title': 'Sunset',\n\t\t'src': 'sunset.jpg',\n\t\t'cls': 'framed'\n\t}\nbound_args = sig.bind(**my_tag)\nprint(bound_args)\nfor name, value in bound_args.arguments.items(): # 一个OrderedDict对象\n\tprint(name, '=', value)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def findSubsequences(self, nums: List[int]) ->List[List[int]]:
res: List[List[int]] = []
s = set()
def deep(pos: int, tmp: List[int]):
if pos == len(nums):
if len(tmp) < 2:
return
for i in range(1, len(tmp)):
if tmp[i - 1] > tmp[i]:
return
if tuple(tmp) not in s:
res.append(tmp)
s.add(tuple(tmp))
else:
deep(pos + 1, tmp)
deep(pos + 1, tmp + [nums[pos]])
deep(0, [])
return res
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def findSubsequences(self, nums: List[int]) ->List[List[int]]:
res: List[List[int]] = []
s = set()
def deep(pos: int, tmp: List[int]):
if pos == len(nums):
if len(tmp) < 2:
return
for i in range(1, len(tmp)):
if tmp[i - 1] > tmp[i]:
return
if tuple(tmp) not in s:
res.append(tmp)
s.add(tuple(tmp))
else:
deep(pos + 1, tmp)
deep(pos + 1, tmp + [nums[pos]])
deep(0, [])
return res
print(Solution().findSubsequences([4, 6, 7, 7]))
<|reserved_special_token_1|>
from typing import List
class Solution:
def findSubsequences(self, nums: List[int]) ->List[List[int]]:
res: List[List[int]] = []
s = set()
def deep(pos: int, tmp: List[int]):
if pos == len(nums):
if len(tmp) < 2:
return
for i in range(1, len(tmp)):
if tmp[i - 1] > tmp[i]:
return
if tuple(tmp) not in s:
res.append(tmp)
s.add(tuple(tmp))
else:
deep(pos + 1, tmp)
deep(pos + 1, tmp + [nums[pos]])
deep(0, [])
return res
print(Solution().findSubsequences([4, 6, 7, 7]))
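# For reference (added note, not in the original): with nums = [4, 6, 7, 7] the call above
# collects the eight non-decreasing subsequences of length >= 2, namely [7, 7], [6, 7],
# [6, 7, 7], [4, 7], [4, 7, 7], [4, 6], [4, 6, 7] and [4, 6, 7, 7]; duplicates are removed
# via the tuple set, and the order follows the skip-before-take depth-first recursion.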
|
flexible
|
{
"blob_id": "3edfc1098c775fa31456aa3cc938051b2dbb8697",
"index": 1664,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n\n def findSubsequences(self, nums: List[int]) ->List[List[int]]:\n res: List[List[int]] = []\n s = set()\n\n def deep(pos: int, tmp: List[int]):\n if pos == len(nums):\n if len(tmp) < 2:\n return\n for i in range(1, len(tmp)):\n if tmp[i - 1] > tmp[i]:\n return\n if tuple(tmp) not in s:\n res.append(tmp)\n s.add(tuple(tmp))\n else:\n deep(pos + 1, tmp)\n deep(pos + 1, tmp + [nums[pos]])\n deep(0, [])\n return res\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def findSubsequences(self, nums: List[int]) ->List[List[int]]:\n res: List[List[int]] = []\n s = set()\n\n def deep(pos: int, tmp: List[int]):\n if pos == len(nums):\n if len(tmp) < 2:\n return\n for i in range(1, len(tmp)):\n if tmp[i - 1] > tmp[i]:\n return\n if tuple(tmp) not in s:\n res.append(tmp)\n s.add(tuple(tmp))\n else:\n deep(pos + 1, tmp)\n deep(pos + 1, tmp + [nums[pos]])\n deep(0, [])\n return res\n\n\nprint(Solution().findSubsequences([4, 6, 7, 7]))\n",
"step-4": "from typing import List\n\n\nclass Solution:\n\n def findSubsequences(self, nums: List[int]) ->List[List[int]]:\n res: List[List[int]] = []\n s = set()\n\n def deep(pos: int, tmp: List[int]):\n if pos == len(nums):\n if len(tmp) < 2:\n return\n for i in range(1, len(tmp)):\n if tmp[i - 1] > tmp[i]:\n return\n if tuple(tmp) not in s:\n res.append(tmp)\n s.add(tuple(tmp))\n else:\n deep(pos + 1, tmp)\n deep(pos + 1, tmp + [nums[pos]])\n deep(0, [])\n return res\n\n\nprint(Solution().findSubsequences([4, 6, 7, 7]))\n",
"step-5": null,
"step-ids": [
0,
2,
3,
4
]
}
|
[
0,
2,
3,
4
] |
# test CurlypivSetup
"""
Notes about program
"""
# 1.0 import modules
import numpy as np
from skimage import io
import glob
from os.path import join
import matplotlib.pyplot as plt
from curlypiv.utils.calibrateCamera import measureIlluminationDistributionXY, calculate_depth_of_correlation, calculate_darkfield, plot_field_depth
# 2.0 define class
class CurlypivTestSetup(object):
def __init__(self, name, chip, optics, fluid_handling_system):
"""
All the "settings" used in the experimental setup:
1. chip (class)
1.1 solid material (class) (e.g. SiO2)
1.1.1 transparency
1.1.2 fluorescence spectral characteristics
1.1.3 surface charge density
1.1.4 %/vol (here would be 100%)
1.2 channel (class)
1.2.1 height
1.2.2 width
1.2.3 length
1.3 reservoir volume
1.4 electrode configuration (class)
1.4.1 material
1.4.2 separation distance
1.4.3 distance to channel entrance
2. test solution (class)
2.1 liquid material (class) (e.g. electrolyte)
2.1.1 chemical species (e.g. KCl)
2.1.2 concentration
2.1.3 measurable quantity (class) (e.g. conductivity)
2.1.3.1 theoretical
2.1.3.2 measured
                            2.1.3.2.1 measured conductivity
                            2.1.3.2.2 measured date
2.1.4 measurable quantity (class) (e.g. pH)
2.1.4.1 theoretical
2.1.4.2 measured
                            2.1.4.2.1 measured pH
                            2.1.4.2.2 measured date
2.2 fluorescent particles (class)
2.2.0 diameter
2.2.. measurable quantity (class) (e.g. zeta)
2.2.. measurable quantity (class) (e.g electrophoretic mobility)
2.2.. spectral characteristics
2.2.1 solid materials (class) (e.g. polystyrene)
2.2.1.1 %/vol
2.2.2 liquid materials (class) (e.g. DI water)
2.2.3 liquid materials (Class) (e.g. sodium azide)
2.2.3.1 conductivity
2.2.3.2 concentration
3. illumination (class)
3.1 source (class)
3.1.1 type (e.g. Hg lamp)
3.1.2 intensity
3.1.3 emission spectra
3.2 optical element (class) (e.g. excitation filter)
3.3 optical element (class) (e.g. emission filter)
3.4 optical element (class) (e.g. dichroic mirror)
4. microscope
4.1 type (Olympus iX 73)
4.2 objective (class)
                    4.2.1 numerical aperture (e.g. 0.3)
4.2.2 magnification (e.g. 20X)
4.2.3 field of view (e.g. 500 x 500 um)
4.2.4 depth of focus (e.g 4.1 microns)
"""
self.name = name
self.chip = chip
self.optics = optics
self.fluid_handling_system = fluid_handling_system
class chip(object):
def __init__(self, channel=None, bpe=None, reservoir=None, electrodes=None, fluid_handling_system=None,
material_in_optical_path=None, thickness_in_optical_path=None):
"""
Everything important about the chip
"""
#self.material = material # deprecated so the channel class can hold this information
self.channel = channel
self.bpe = bpe
self.electrodes = electrodes
self.fluid_handling_system = fluid_handling_system
self.material_in_optical_path = material_in_optical_path
self.thickness_in_optical_path = thickness_in_optical_path
class channel(object):
def __init__(self, length=None, width=None, height=None,
material_bottom_wall_surface=None, material_top_wall_surface=None, material_fluid=None):
"""
        Everything important about the channel
"""
self.length = length
self.width = width
self.height = height
self.material_bottom_wall_surface = material_bottom_wall_surface # material should only hold relevant electrokinetic data
self.material_top_wall_surface = material_top_wall_surface # material should only hold relevant elect
self.material_fluid = material_fluid # could be a mixture of liquid materials + fluorescent particles
class bpe(object):
def __init__(self, length=None, width=None, height=None, material=None, adhesion_material=None,
dielectric_coating=None):
"""
        Everything important about the bipolar electrode (BPE)
"""
self.length = length
self.linspace_x = np.linspace(-length/2, length/2, num=100)
self.width = width
self.height = height
self.material = material
if self.material.thickness:
if self.material.thickness != self.height:
raise ValueError("BPE height must equal BPE material thickness")
# adhesion layer used for thin metal film BPE
self.adhesion_material = adhesion_material
# dielectric coating on top of BPE
if dielectric_coating:
self.dielectric_coating = dielectric_coating
else:
self.dielectric_coating = material_solid(name='no_dielectric', permittivity=1, thickness=1e-12, Ka=6, Kb=2, reaction_site_density=5)
class optics(object):
def __init__(self, microscope, fluorescent_particles=None, calibration_grid=None, pixel_to_micron_scaling=None):
self.microscope = microscope
self.fluorescent_particles = fluorescent_particles
self.calibration_grid = calibration_grid
if self.microscope.objective.magnification == 50:
self.pixel_to_micron_scaling = 0.60 # (microns/pixels)
elif self.microscope.objective.magnification == 20:
self.pixel_to_micron_scaling = 1.55 # (microns/pixels)
else:
raise ValueError("Unable to determine microns/pixels scaling because objective magnification not 50X or 20X")
if pixel_to_micron_scaling is not None:
print("Manual input of pixel_to_micron_scaling is deprecated. A scaling factor of {} um/pix for {} magnification was instantiated.".format(self.pixel_to_micron_scaling, self.microscope.objective.magnification))
"""
--- I THINK THIS SECTION IS DEPRECATED ---
Notes: deprecated because calculating the scaling factor or entering it manually is too confusing. I have
permanently figured out the correct scaling.
if microscope.objective.pixel_to_micron is not None and pixel_to_micron_scaling is None:
self.pixel_to_micron = microscope.objective.pixel_to_micron
elif microscope.objective.pixel_to_micron is not None and pixel_to_micron_scaling is not None and microscope.objective.pixel_to_micron != pixel_to_micron_scaling:
raise ValueError("Conflicting scaling factors: microscope.objective={}, optics={}".format(microscope.objective.pixel_to_micron, pixel_to_micron_scaling))
elif microscope.objective.pixel_to_micron is None and pixel_to_micron_scaling is not None:
self.pixel_to_micron = pixel_to_micron_scaling
"""
class illumination(object):
def __init__(self, basePath=None, source=None, excitation=None, emission=None, dichroic=None, illumination_distribution=None,
calculate_illumination_distribution=False,
illumPath=None, illumSavePath=None, illumSaveName=None, showIllumPlot=False, save_txt=False, save_plot=False, save_image=False):
"""
details about the optical setup
:param source:
:param excitation:
:param emission:
:param dichroic:
"""
self.basePath = basePath # this should come from CurlypivTestCollection
self.source = source
self.excitation_wavelength = excitation
self.emission_wavelength = emission
self.dichroic = dichroic
if illumination_distribution is not None:
self.illumination_distribution = illumination_distribution
elif illumPath is not None:
flatfield = io.imread(illumPath, plugin='tifffile')
if len(np.shape(flatfield)) > 2:
flatfield = np.asarray(np.rint(np.mean(flatfield, axis=0)), dtype='uint16')
self.illumination_distribution = flatfield
elif calculate_illumination_distribution and illumination_distribution is None:
self.illumination_distribution = measureIlluminationDistributionXY(basePath=self.basePath, illumPath=illumPath,
show_image=showIllumPlot, save_image=save_image, save_img_type='.tif',
save_txt=save_txt, show_plot=showIllumPlot, save_plot=save_plot,
savePath=illumSavePath, savename=illumSaveName)
else:
self.illumination_distribution = illumination_distribution
self.flatfield = self.illumination_distribution
if self.flatfield is not None:
self.flatfield_mean = np.mean(self.flatfield)
self.flatfield_std = np.std(self.flatfield)
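        # Added note: the flatfield (illumination distribution) and the darkfield class below are
        # typically combined for flat-field correction as
        #   corrected = (raw - darkfield) / (flatfield - darkfield) * mean(flatfield - darkfield),
        # which removes both the sensor offset and the non-uniform illumination.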
class darkfield(object):
def __init__(self, basePath, darkframePath=None, flip_image_across_axis=None, show_image=False, save_image=False, save_img_type='.tif',
savePath=None, savename=None, save_plot=False):
"""
details about dark field image
"""
self.basePath = basePath
img, mean, std = calculate_darkfield(self.basePath, darkframePath=darkframePath, flip_image_axes=flip_image_across_axis, show_image=show_image, save_image=save_image, save_img_type=save_img_type,
savePath=savePath, savename=savename, save_plot=save_plot)
self.img = img
self.mean = mean
self.std = std
class microscope(object):
def __init__(self, type, objective, illumination, ccd):
"""
        describes the microscope setup
:param type:
:param objective:
"""
self.type = type # e.g. Olympus iX73
self.objective = objective
self.illumination = illumination
self.ccd = ccd
class ccd(object):
def __init__(self, exposure_time, img_acq_rate, EM_gain, name='iXon Ultra 897', img_acq_type='emcdd', darkfield=None, binning=None,
vertical_pixel_shift_speed=0.5e-6, horizontal_pixel_shift_speed=0.1e-6, horizontal_pixel_shift_rate_bits=14,
frame_transfer=True, crop_mode=False, acquisition_mode='kinetic', triggering='internal', readout_mode='image',
pixels=512, pixel_size=16e-6):
"""
describe the CCD class
"""
self.name = name
self.img_acq_type = img_acq_type
self.exposure_time = exposure_time
self.img_acq_rate = img_acq_rate
self.em_gain = EM_gain
self.darkfield = darkfield
self.binning = binning
# supporting camera acquisition settings
self.vpss = vertical_pixel_shift_speed
self.hpss = horizontal_pixel_shift_speed
self.hpss_bits = horizontal_pixel_shift_rate_bits
self.frame_transfer = frame_transfer
self.crop_mode = crop_mode
self.acquisition_mode = acquisition_mode
self.triggering = triggering
self.readout_mode = readout_mode
if isinstance(pixels, int):
self.pixels = (pixels, pixels)
else:
self.pixels = pixels
self.pixel_size = pixel_size
self.image_area = (self.pixels[0]*pixel_size, self.pixels[1]*pixel_size)
class objective(object):
def __init__(self, fluoro_particle, name=None, numerical_aperture=None, magnification=None, basePath=None, channel_height=None, illumination=None, wavelength=None, microgrid=None, auto_calc_pix_to_micron_scaling=False, pixel_to_micron=None, field_number=None, n0=1, show_depth_plot=False, save_depth_plot=False):
"""
Objectives in the Pennathur Lab Dark Room uScope:
20X - LCPlanFL N 20X LCD [LCPLFLN20xLCD]
magnification: 20
numerical_aperture: 0.45
field_number: 26.5
working distance: 7.4 - 8.3 mm
transmittance: 90% @ 425 - 670 nm
correction collar: 0 - 1.2 mm
microns per pixel: 1.55
50X - LCPlanFL N 50x LCD [LCPLFLN50xLCD]
magnification: 50
numerical aperture: 0.7
field number: 26.5
working distance: 2.2 - 3 mm
transmittance: 90% @ 425 - 650 nm
correction collar: 0 - 1.2 mm
microns per pixel: 0.6
Manufacturer website: https://www.olympus-ims.com/en/microscope/lcplfln-lcd/#!cms[focus]=cmsContent11428
"""
# if name is entered, then pull all the terms directly
self.name = name
if name == 'LCPLFLN20xLCD':
self.magnification = 20
self.numerical_aperture = 0.45
self.field_number = 26.5
self.transmittance = 0.9
self.pixel_to_micron = 1.55
elif name == 'LCPLFLN50xLCD':
self.magnification = 50
self.numerical_aperture = 0.7
self.field_number = 26.5
self.transmittance = 0.9
self.pixel_to_micron = 0.6
else:
self.numerical_aperture = numerical_aperture
self.magnification = magnification
self.field_number = field_number
# general terms
self._illumination = illumination
if self._illumination is not None:
self._wavelength = self._illumination.emission_wavelength
elif wavelength is not None:
self._wavelength = wavelength
else:
raise ValueError("A wavelength is required via the <illumination> class or <wavelength> input parameter")
self._pd = fluoro_particle.diameter
self._n0 = n0
self.calculate_depth_of_field()
self.calculate_depth_of_correlation()
if field_number:
self.calculate_field_of_view()
if show_depth_plot or save_depth_plot:
plot_field_depth(depth_of_corr=self.depth_of_correlation, depth_of_field=self.depth_of_field, show_depth_plot=show_depth_plot, save_depth_plot=save_depth_plot,
basePath=basePath, savename=None, channel_height=channel_height, objective=self.magnification)
# grids and scaling factors
if auto_calc_pix_to_micron_scaling and self.pixel_to_micron is None:
self.microgrid = microgrid
self.calculate_pixel_to_micron_scaling()
def calculate_field_of_view(self):
self.field_of_view = self.field_number / self.magnification
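        # Added example: with the 26.5 field number listed above (in mm), field_of_view evaluates
        # to 26.5 / 20 = 1.325 for the 20X objective and 26.5 / 50 = 0.53 for the 50X.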
def calculate_depth_of_field(self, e=16e-6, n=1):
"""
        e: CCD pixel size, e.g. e = 16e-6 m (16 microns per pixel)
"""
self.depth_of_field = self._wavelength*n/self.numerical_aperture**2+e*n/(self.magnification*self.numerical_aperture)
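        # Added check (illustrative values, not from the original setup): for the LCPLFLN50xLCD
        # objective (NA = 0.7, M = 50) with lambda = 525e-9 m, n = 1 and e = 16e-6 m this gives
        # 525e-9 / 0.49 + 16e-6 / 35, roughly 1.07e-6 + 0.46e-6, i.e. about 1.5 um of depth of field.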
def calculate_depth_of_correlation(self, eps=0.01):
# step 0: define
n = self._n0
dp = self._pd
NA = self.numerical_aperture
M = self.magnification
lmbda = self._wavelength
# step 1: calculate the depth of correlation for the optical setup
depth_of_correlation = calculate_depth_of_correlation(M=M, NA=NA, dp=dp, n=n, lmbda=lmbda, eps=eps)
self.depth_of_correlation = depth_of_correlation
def calculate_pixel_to_micron_scaling(self):
if self.microgrid is None:
raise ValueError("Need objective.microgrid property in order to calculate scaling factor")
# script to calculate scaling factor from grid
# would go here
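        # sketch of one possible approach (not implemented; scipy and the peak-detection settings
        # below are assumptions, not the lab's documented procedure): measure the mean line-to-line
        # spacing of the averaged grid image in pixels and divide the known spacing by it, e.g.
        #   from scipy.signal import find_peaks
        #   profile = self.microgrid.img_grid.mean(axis=0)
        #   peaks, _ = find_peaks(-profile, distance=5)   # assumes grid lines are darker than background
        #   self.pixel_to_micron = self.microgrid.spacing / np.mean(np.diff(peaks))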
@property
def NA(self):
return self.numerical_aperture
@property
def M(self):
return self.magnification
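# usage sketch (illustrative; the particle diameter and wavelength below are hypothetical, and
# objective() relies on this module's calculate_depth_of_correlation helper being importable):
#   particles = fluorescent_particles(diameter=500e-9)
#   obj = objective(fluoro_particle=particles, name='LCPLFLN20xLCD', wavelength=595e-9)
#   obj.depth_of_field, obj.depth_of_correlation and obj.field_of_view are then populated.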
class microgrid(object):
def __init__(self, gridPath=None, center_to_center_spacing=None, feature_width=None, grid_type='grid', show_grid=False):
"""
this class holds images for the microgrid and performs pixel to micron scaling calculations
"""
if gridPath is not None:
self.gridPath = gridPath
self.spacing = center_to_center_spacing
self.width = feature_width
self.grid_type = grid_type
# find files in directory
file_list = glob.glob(join(self.gridPath, 'grid*.tif'))
if len(file_list) < 1:
raise ValueError("No grid*.tif files found in {}".format(self.gridPath))
            img_grid = np.zeros(shape=(512, 512))  # accumulator; assumes 512 x 512 frames (the default CCD sensor size)
for f in file_list:
img = io.imread(f, plugin='tifffile')
if len(np.shape(img)) > 2:
img = np.mean(img, axis=0)
img_grid += img
img_grid = img_grid / len(file_list)
self.img_grid = img_grid
if show_grid is True:
fig, ax = plt.subplots()
ax.imshow(img_grid, cmap='gray')
ax.set_xlabel('pixels')
ax.set_ylabel('pixels')
plt.title('grid: 10 um Lines; 50 um Spacing')
plt.show()
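# usage sketch (illustrative; the path is hypothetical and the spacing/width values follow the
# 10 um line / 50 um spacing grid referenced above, in whatever units the caller uses elsewhere):
#   grid = microgrid(gridPath='/path/to/calibration', center_to_center_spacing=50,
#                    feature_width=10, grid_type='grid', show_grid=False)
#   grid.img_grid then holds the mean of all grid*.tif frames found in gridPath.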
class fluorescent_particles(object):
    def __init__(self, name=None, materials=None, diameter=None, fluorescence_spectra=None, concentration=None,
                 electrophoretic_mobility=None, zeta=None):
"""
        the details of the fluorescent particles used
:param materials:
:param diameter:
:param fluorescence_spectra:
:param concentration:
:param electrophoretic_mobility:
:param zeta:
"""
        self.name = name
        self.materials = materials
        self.concentration = concentration
        self.electrophoretic_mobility = electrophoretic_mobility
        self.zeta = zeta
        self.diameter = diameter
        if diameter:
            # Stokes-Einstein diffusivity of a sphere of this diameter in water at room temperature
            k_b = 1.3806e-23    # (J/K) Boltzmann constant
            T = 298             # (K) temperature
            mu = 0.001          # (Pa*s) dynamic viscosity of water
            self.diffusivity = k_b * T / (6 * np.pi * mu * diameter / 2)
        self.fluorescence_spectra = fluorescence_spectra
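# worked example (illustrative; the 500 nm diameter is hypothetical): for diameter = 500e-9 m,
# diffusivity = (1.3806e-23 * 298) / (6 * pi * 0.001 * 250e-9) ~= 8.7e-13 m^2/s,
# i.e. roughly 0.87 um^2/s, the Stokes-Einstein value for a 500 nm sphere in water at 25 C.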
class reservoir(object):
def __init__(self, diameter, height, height_of_reservoir=None, material=None):
"""
        describes a fluid reservoir
        :param diameter: reservoir diameter
        :param height: height of the fluid column in the reservoir
        :param height_of_reservoir: elevation of the reservoir; used to compute the hydrostatic pressure
        :param material: the fluid in the reservoir (its density sets the hydrostatic pressure)
"""
g = 9.81 # m/s**2
self.material = material
self.diameter = diameter
self.height = height
        self.volume = np.pi*self.diameter**2/4*self.height  # volume of the cylindrical reservoir
self.height_of_reservoir = height_of_reservoir
if material and height_of_reservoir:
self.hydrostatic_pressure = material.density*g*self.height_of_reservoir
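# worked example (illustrative; values are hypothetical): a water-like liquid
# (density = 1000 kg/m^3) with height_of_reservoir = 0.10 m gives
# hydrostatic_pressure = 1000 * 9.81 * 0.10 ~= 981 Pa (about 0.98 kPa).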
class fluid_handling_system(object):
def __init__(self, fluid_reservoir=None, all_tubing=None, onchip_reservoir=None):
"""
describes the fluid handling system
"""
        self.fluid_reservoir = fluid_reservoir
self.all_tubing = all_tubing
self.onchip_reservoir = onchip_reservoir
class tubing(object):
def __init__(self, inner_diameter=None, length=None, material=None):
"""
describes each segment of tubing
"""
self.inner_diameter = inner_diameter
self.length = length
self.material = material
class optical_element(object):
def __init__(self, passing_wavelengths=None, reflectivity=None):
"""
this class describes the optical characteristics of any material or element
        :param passing_wavelengths: wavelengths transmitted by the element
        :param reflectivity: reflectivity of the element
"""
        self.passing_wavelengths = passing_wavelengths
        self.reflectivity = reflectivity
class measurable_quantity(object):
def __init__(self, reference_value=None, measured_value=None):
"""
what value was measured and when
"""
self.reference_value = reference_value
self.measured_value = measured_value
class measurement(object):
def __init__(self, value=None, date=None):
"""
Object for storing measurements
:param value:
:param date:
"""
self.value = value
self.date = date
class electrode_configuration(object):
def __init__(self, material=None, length=None, entrance_length=None):
"""
Object for holding electrode configuration details
:param material:
:param length:
:param entrance_length:
"""
self.material = material
self.length = length
self.entrance_length = entrance_length
class material_solid(object):
def __init__(self, name=None, zeta=None, concentration=None, index_of_refraction=None, transparency=None, fluorescence_spectra=None,
permittivity=None, conductivity=None, thickness=None, youngs_modulus=None, poissons_ratio=None,
density=None, dielectric_strength=None, reaction_site_density=None, Ka=None, Kb=None, width=None, length=None):
"""
everything about a material
:param transparency:
:param fluorescence_spectra:
:param zeta:
"""
# identity
self.name = name
# geometry
self.length = length
self.width = width
self.thickness = thickness
# mechanical
self.density = density
self.concentration = concentration # For a solid, this is % by volume.
self.youngs_modulus = youngs_modulus
self.poissons_ratio = poissons_ratio
# optical
self.index_of_refraction = index_of_refraction
self.fluorescence_spectra = fluorescence_spectra
self.transparency = transparency
if self.transparency:
            self.reflectivity = 1 - self.transparency  # assumes a non-absorbing material, so R + T = 1
# electrochemical
self.conductivity = conductivity
if permittivity:
self.permittivity = permittivity
self.zeta = zeta
self.dielectric_strength = dielectric_strength
if reaction_site_density:
self.reaction_site_density = reaction_site_density*1e18 # (#/nm2) surface density of reaction sites: accepts nm2 and converts to m2 (see Squires)
self.Ka = Ka # reaction equilibrium constant - upper bound
self.Kb = Kb # reaction equilibrium constant - lower bound
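# usage sketch (illustrative; all values below are hypothetical and not a documented chip
# material, though the Ka/Kb pair mirrors the defaults used elsewhere in this module):
#   sio2 = material_solid(name='SiO2', transparency=0.99, index_of_refraction=1.46,
#                         permittivity=3.9, zeta=-0.08, thickness=500e-9,
#                         reaction_site_density=5, Ka=6, Kb=2)
#   reaction_site_density is passed in sites/nm^2 and stored in sites/m^2 (here 5 -> 5e18).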
class material_liquid(object):
def __init__(self, name=None, species=None, concentration=None, conductivity=None, pH=None, density=None, viscosity=None,
permittivity=None, temperature=None, valence=1.0):
"""
everything about a liquid
:param species:
:param concentration:
:param conductivity:
:param pH:
"""
# identity
self.name = name
# electro/chemical
self.species = species
        self.concentration = concentration  # (mM) = (mmol/L) = (mol/m3)
self.conductivity = conductivity
if permittivity:
self.permittivity = permittivity
if pH:
self.pH = pH
            self.c_H = 10**-pH * 1e3  # (mM) = (mmol/L) = (mol/m3); concentration of hydrogen ions (H+)
self.valence = valence
# mechanical
self.density = density
self.viscosity = viscosity
self.temperature = temperature
        self.diffusivity = 2e-9  # (m^2/s) diffusivity of KCl in DI water [Soni]; hard-coded default regardless of species
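
if __name__ == '__main__':
    # minimal smoke test (illustrative only; the values below are hypothetical and not a
    # documented experimental configuration)
    particles = fluorescent_particles(name='demo particles', diameter=500e-9)
    print('particle diffusivity (m^2/s): {}'.format(particles.diffusivity))
    test_liquid = material_liquid(name='demo KCl buffer', species='KCl', concentration=0.1,
                                  pH=7.0, density=1000.0, viscosity=0.001, temperature=298.0)
    print('hydrogen ion concentration (mol/m^3): {}'.format(test_liquid.c_H))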
inner_diameter\n self.length = length\n self.material = material\n\n\nclass optical_element(object):\n\n def __init__(self, passing_wavelengths=None, reflectivity=None):\n \"\"\"\n this class describes the optical characteristics of any material or element\n :param wavelength_bandpass:\n \"\"\"\n self.passing_wavelengths = passing_wavelengths\n self.reflectivity = reflectivity\n\n\nclass measurable_quantity(object):\n\n def __init__(self, reference_value=None, measured_value=None):\n \"\"\"\n what value was measured and when\n \"\"\"\n self.reference_value = reference_value\n self.measured_value = measured_value\n\n\nclass measurement(object):\n\n def __init__(self, value=None, date=None):\n \"\"\"\n Object for storing measurements\n :param value:\n :param date:\n \"\"\"\n self.value = value\n self.date = date\n\n\nclass electrode_configuration(object):\n\n def __init__(self, material=None, length=None, entrance_length=None):\n \"\"\"\n Object for holding electrode configuration details\n :param material:\n :param length:\n :param entrance_length:\n \"\"\"\n self.material = material\n self.length = length\n self.entrance_length = entrance_length\n\n\nclass material_solid(object):\n\n def __init__(self, name=None, zeta=None, concentration=None,\n index_of_refraction=None, transparency=None, fluorescence_spectra=\n None, permittivity=None, conductivity=None, thickness=None,\n youngs_modulus=None, poissons_ratio=None, density=None,\n dielectric_strength=None, reaction_site_density=None, Ka=None, Kb=\n None, width=None, length=None):\n \"\"\"\n everything about a material\n :param transparency:\n :param fluorescence_spectra:\n :param zeta:\n \"\"\"\n self.name = name\n self.length = length\n self.width = width\n self.thickness = thickness\n self.density = density\n self.concentration = concentration\n self.youngs_modulus = youngs_modulus\n self.poissons_ratio = poissons_ratio\n self.index_of_refraction = index_of_refraction\n self.fluorescence_spectra = fluorescence_spectra\n self.transparency = transparency\n if self.transparency:\n self.reflectivity = 1 / self.transparency\n self.conductivity = conductivity\n if permittivity:\n self.permittivity = permittivity\n self.zeta = zeta\n self.dielectric_strength = dielectric_strength\n if reaction_site_density:\n self.reaction_site_density = reaction_site_density * 1e+18\n self.Ka = Ka\n self.Kb = Kb\n\n\nclass material_liquid(object):\n\n def __init__(self, name=None, species=None, concentration=None,\n conductivity=None, pH=None, density=None, viscosity=None,\n permittivity=None, temperature=None, valence=1.0):\n \"\"\"\n everything about a liquid\n :param species:\n :param concentration:\n :param conductivity:\n :param pH:\n \"\"\"\n self.name = name\n self.species = species\n self.concentration = concentration\n self.conductivity = conductivity\n if permittivity:\n self.permittivity = permittivity\n if pH:\n self.pH = pH\n self.c_H = 10 ** -pH * 1000.0\n self.valence = valence\n self.density = density\n self.viscosity = viscosity\n self.temperature = temperature\n self.diffusivity = 2e-09\n",
"step-5": "# test CurlypivSetup\n\"\"\"\nNotes about program\n\"\"\"\n\n# 1.0 import modules\nimport numpy as np\nfrom skimage import io\nimport glob\nfrom os.path import join\nimport matplotlib.pyplot as plt\nfrom curlypiv.utils.calibrateCamera import measureIlluminationDistributionXY, calculate_depth_of_correlation, calculate_darkfield, plot_field_depth\n\n# 2.0 define class\n\nclass CurlypivTestSetup(object):\n\n def __init__(self, name, chip, optics, fluid_handling_system):\n \"\"\"\n All the \"settings\" used in the experimental setup:\n 1. chip (class)\n 1.1 solid material (class) (e.g. SiO2)\n 1.1.1 transparency\n 1.1.2 fluorescence spectral characteristics\n 1.1.3 surface charge density\n 1.1.4 %/vol (here would be 100%)\n 1.2 channel (class)\n 1.2.1 height\n 1.2.2 width\n 1.2.3 length\n 1.3 reservoir volume\n 1.4 electrode configuration (class)\n 1.4.1 material\n 1.4.2 separation distance\n 1.4.3 distance to channel entrance\n 2. test solution (class)\n 2.1 liquid material (class) (e.g. electrolyte)\n 2.1.1 chemical species (e.g. KCl)\n 2.1.2 concentration\n 2.1.3 measurable quantity (class) (e.g. conductivity)\n 2.1.3.1 theoretical\n 2.1.3.2 measured\n 2.1.3.2.1 measured conductivity\n 2.1.3.2.1 measured date\n 2.1.4 measurable quantity (class) (e.g. pH)\n 2.1.4.1 theoretical\n 2.1.4.2 measured\n 2.1.4.2.1 measured conductivity\n 2.1.4.2.1 measured date\n 2.2 fluorescent particles (class)\n 2.2.0 diameter\n 2.2.. measurable quantity (class) (e.g. zeta)\n 2.2.. measurable quantity (class) (e.g electrophoretic mobility)\n 2.2.. spectral characteristics\n 2.2.1 solid materials (class) (e.g. polystyrene)\n 2.2.1.1 %/vol\n 2.2.2 liquid materials (class) (e.g. DI water)\n 2.2.3 liquid materials (Class) (e.g. sodium azide)\n 2.2.3.1 conductivity\n 2.2.3.2 concentration\n 3. illumination (class)\n 3.1 source (class)\n 3.1.1 type (e.g. Hg lamp)\n 3.1.2 intensity\n 3.1.3 emission spectra\n 3.2 optical element (class) (e.g. excitation filter)\n 3.3 optical element (class) (e.g. emission filter)\n 3.4 optical element (class) (e.g. dichroic mirror)\n 4. microscope\n 4.1 type (Olympus iX 73)\n 4.2 objective (class)\n 4.2.1 numerical aperature (e.g. 0.3)\n 4.2.2 magnification (e.g. 20X)\n 4.2.3 field of view (e.g. 
500 x 500 um)\n 4.2.4 depth of focus (e.g 4.1 microns)\n \"\"\"\n self.name = name\n self.chip = chip\n self.optics = optics\n self.fluid_handling_system = fluid_handling_system\n\nclass chip(object):\n\n def __init__(self, channel=None, bpe=None, reservoir=None, electrodes=None, fluid_handling_system=None,\n material_in_optical_path=None, thickness_in_optical_path=None):\n \"\"\"\n Everything important about the chip\n \"\"\"\n #self.material = material # deprecated so the channel class can hold this information\n self.channel = channel\n self.bpe = bpe\n self.electrodes = electrodes\n self.fluid_handling_system = fluid_handling_system\n self.material_in_optical_path = material_in_optical_path\n self.thickness_in_optical_path = thickness_in_optical_path\n\nclass channel(object):\n\n def __init__(self, length=None, width=None, height=None,\n material_bottom_wall_surface=None, material_top_wall_surface=None, material_fluid=None):\n \"\"\"\n Everything important about the chip\n \"\"\"\n self.length = length\n self.width = width\n self.height = height\n self.material_bottom_wall_surface = material_bottom_wall_surface # material should only hold relevant electrokinetic data\n self.material_top_wall_surface = material_top_wall_surface # material should only hold relevant elect\n self.material_fluid = material_fluid # could be a mixture of liquid materials + fluorescent particles\n\nclass bpe(object):\n\n def __init__(self, length=None, width=None, height=None, material=None, adhesion_material=None,\n dielectric_coating=None):\n \"\"\"\n Everything important about the chip\n \"\"\"\n self.length = length\n self.linspace_x = np.linspace(-length/2, length/2, num=100)\n self.width = width\n self.height = height\n self.material = material\n\n if self.material.thickness:\n if self.material.thickness != self.height:\n raise ValueError(\"BPE height must equal BPE material thickness\")\n\n # adhesion layer used for thin metal film BPE\n self.adhesion_material = adhesion_material\n\n # dielectric coating on top of BPE\n if dielectric_coating:\n self.dielectric_coating = dielectric_coating\n else:\n self.dielectric_coating = material_solid(name='no_dielectric', permittivity=1, thickness=1e-12, Ka=6, Kb=2, reaction_site_density=5)\n\nclass optics(object):\n def __init__(self, microscope, fluorescent_particles=None, calibration_grid=None, pixel_to_micron_scaling=None):\n\n self.microscope = microscope\n self.fluorescent_particles = fluorescent_particles\n self.calibration_grid = calibration_grid\n\n if self.microscope.objective.magnification == 50:\n self.pixel_to_micron_scaling = 0.60 # (microns/pixels)\n elif self.microscope.objective.magnification == 20:\n self.pixel_to_micron_scaling = 1.55 # (microns/pixels)\n else:\n raise ValueError(\"Unable to determine microns/pixels scaling because objective magnification not 50X or 20X\")\n\n if pixel_to_micron_scaling is not None:\n print(\"Manual input of pixel_to_micron_scaling is deprecated. A scaling factor of {} um/pix for {} magnification was instantiated.\".format(self.pixel_to_micron_scaling, self.microscope.objective.magnification))\n \"\"\"\n --- I THINK THIS SECTION IS DEPRECATED ---\n Notes: deprecated because calculating the scaling factor or entering it manually is too confusing. 
I have\n permanently figured out the correct scaling.\n \n if microscope.objective.pixel_to_micron is not None and pixel_to_micron_scaling is None:\n self.pixel_to_micron = microscope.objective.pixel_to_micron\n elif microscope.objective.pixel_to_micron is not None and pixel_to_micron_scaling is not None and microscope.objective.pixel_to_micron != pixel_to_micron_scaling:\n raise ValueError(\"Conflicting scaling factors: microscope.objective={}, optics={}\".format(microscope.objective.pixel_to_micron, pixel_to_micron_scaling))\n elif microscope.objective.pixel_to_micron is None and pixel_to_micron_scaling is not None:\n self.pixel_to_micron = pixel_to_micron_scaling\n \"\"\"\n\nclass illumination(object):\n\n def __init__(self, basePath=None, source=None, excitation=None, emission=None, dichroic=None, illumination_distribution=None,\n calculate_illumination_distribution=False,\n illumPath=None, illumSavePath=None, illumSaveName=None, showIllumPlot=False, save_txt=False, save_plot=False, save_image=False):\n \"\"\"\n details about the optical setup\n :param source:\n :param excitation:\n :param emission:\n :param dichroic:\n \"\"\"\n self.basePath = basePath # this should come from CurlypivTestCollection\n self.source = source\n self.excitation_wavelength = excitation\n self.emission_wavelength = emission\n self.dichroic = dichroic\n\n if illumination_distribution is not None:\n self.illumination_distribution = illumination_distribution\n elif illumPath is not None:\n flatfield = io.imread(illumPath, plugin='tifffile')\n if len(np.shape(flatfield)) > 2:\n flatfield = np.asarray(np.rint(np.mean(flatfield, axis=0)), dtype='uint16')\n self.illumination_distribution = flatfield\n elif calculate_illumination_distribution and illumination_distribution is None:\n self.illumination_distribution = measureIlluminationDistributionXY(basePath=self.basePath, illumPath=illumPath,\n show_image=showIllumPlot, save_image=save_image, save_img_type='.tif',\n save_txt=save_txt, show_plot=showIllumPlot, save_plot=save_plot,\n savePath=illumSavePath, savename=illumSaveName)\n else:\n self.illumination_distribution = illumination_distribution\n\n self.flatfield = self.illumination_distribution\n\n if self.flatfield is not None:\n self.flatfield_mean = np.mean(self.flatfield)\n self.flatfield_std = np.std(self.flatfield)\n\nclass darkfield(object):\n\n def __init__(self, basePath, darkframePath=None, flip_image_across_axis=None, show_image=False, save_image=False, save_img_type='.tif',\n savePath=None, savename=None, save_plot=False):\n \"\"\"\n details about dark field image\n\n \"\"\"\n self.basePath = basePath\n\n img, mean, std = calculate_darkfield(self.basePath, darkframePath=darkframePath, flip_image_axes=flip_image_across_axis, show_image=show_image, save_image=save_image, save_img_type=save_img_type,\n savePath=savePath, savename=savename, save_plot=save_plot)\n\n self.img = img\n self.mean = mean\n self.std = std\n\nclass microscope(object):\n\n def __init__(self, type, objective, illumination, ccd):\n \"\"\"\n describes the micrscope setup\n :param type:\n :param objective:\n \"\"\"\n self.type = type # e.g. 
Olympus iX73\n self.objective = objective\n self.illumination = illumination\n self.ccd = ccd\n\nclass ccd(object):\n\n def __init__(self, exposure_time, img_acq_rate, EM_gain, name='iXon Ultra 897', img_acq_type='emcdd', darkfield=None, binning=None,\n vertical_pixel_shift_speed=0.5e-6, horizontal_pixel_shift_speed=0.1e-6, horizontal_pixel_shift_rate_bits=14,\n frame_transfer=True, crop_mode=False, acquisition_mode='kinetic', triggering='internal', readout_mode='image',\n pixels=512, pixel_size=16e-6):\n \"\"\"\n describe the CCD class\n \"\"\"\n self.name = name\n self.img_acq_type = img_acq_type\n\n self.exposure_time = exposure_time\n self.img_acq_rate = img_acq_rate\n self.em_gain = EM_gain\n self.darkfield = darkfield\n self.binning = binning\n\n # supporting camera acquisition settings\n self.vpss = vertical_pixel_shift_speed\n self.hpss = horizontal_pixel_shift_speed\n self.hpss_bits = horizontal_pixel_shift_rate_bits\n self.frame_transfer = frame_transfer\n self.crop_mode = crop_mode\n self.acquisition_mode = acquisition_mode\n self.triggering = triggering\n self.readout_mode = readout_mode\n\n if isinstance(pixels, int):\n self.pixels = (pixels, pixels)\n else:\n self.pixels = pixels\n self.pixel_size = pixel_size\n self.image_area = (self.pixels[0]*pixel_size, self.pixels[1]*pixel_size)\n\n\nclass objective(object):\n\n def __init__(self, fluoro_particle, name=None, numerical_aperture=None, magnification=None, basePath=None, channel_height=None, illumination=None, wavelength=None, microgrid=None, auto_calc_pix_to_micron_scaling=False, pixel_to_micron=None, field_number=None, n0=1, show_depth_plot=False, save_depth_plot=False):\n \"\"\"\n\n Objectives in the Pennathur Lab Dark Room uScope:\n\n 20X - LCPlanFL N 20X LCD [LCPLFLN20xLCD]\n magnification: 20\n numerical_aperture: 0.45\n field_number: 26.5\n working distance: 7.4 - 8.3 mm\n transmittance: 90% @ 425 - 670 nm\n correction collar: 0 - 1.2 mm\n microns per pixel: 1.55\n 50X - LCPlanFL N 50x LCD [LCPLFLN50xLCD]\n magnification: 50\n numerical aperture: 0.7\n field number: 26.5\n working distance: 2.2 - 3 mm\n transmittance: 90% @ 425 - 650 nm\n correction collar: 0 - 1.2 mm\n microns per pixel: 0.6\n\n Manufacturer website: https://www.olympus-ims.com/en/microscope/lcplfln-lcd/#!cms[focus]=cmsContent11428\n \"\"\"\n\n # if name is entered, then pull all the terms directly\n self.name = name\n\n if name == 'LCPLFLN20xLCD':\n self.magnification = 20\n self.numerical_aperture = 0.45\n self.field_number = 26.5\n self.transmittance = 0.9\n self.pixel_to_micron = 1.55\n elif name == 'LCPLFLN50xLCD':\n self.magnification = 50\n self.numerical_aperture = 0.7\n self.field_number = 26.5\n self.transmittance = 0.9\n self.pixel_to_micron = 0.6\n else:\n self.numerical_aperture = numerical_aperture\n self.magnification = magnification\n self.field_number = field_number\n\n # general terms\n self._illumination = illumination\n if self._illumination is not None:\n self._wavelength = self._illumination.emission_wavelength\n elif wavelength is not None:\n self._wavelength = wavelength\n else:\n raise ValueError(\"A wavelength is required via the <illumination> class or <wavelength> input parameter\")\n self._pd = fluoro_particle.diameter\n self._n0 = n0\n self.calculate_depth_of_field()\n self.calculate_depth_of_correlation()\n\n if field_number:\n self.calculate_field_of_view()\n\n if show_depth_plot or save_depth_plot:\n plot_field_depth(depth_of_corr=self.depth_of_correlation, depth_of_field=self.depth_of_field, 
show_depth_plot=show_depth_plot, save_depth_plot=save_depth_plot,\n basePath=basePath, savename=None, channel_height=channel_height, objective=self.magnification)\n\n # grids and scaling factors\n if auto_calc_pix_to_micron_scaling and self.pixel_to_micron is None:\n self.microgrid = microgrid\n self.calculate_pixel_to_micron_scaling()\n\n\n def calculate_field_of_view(self):\n self.field_of_view = self.field_number / self.magnification\n\n def calculate_depth_of_field(self, e=16e-6, n=1):\n \"\"\"\n e: CCD pixel resolution example: e = 16 um (16 microns is the pixel size)\n \"\"\"\n self.depth_of_field = self._wavelength*n/self.numerical_aperture**2+e*n/(self.magnification*self.numerical_aperture)\n\n def calculate_depth_of_correlation(self, eps=0.01):\n # step 0: define\n n = self._n0\n dp = self._pd\n NA = self.numerical_aperture\n M = self.magnification\n lmbda = self._wavelength\n\n # step 1: calculate the depth of correlation for the optical setup\n depth_of_correlation = calculate_depth_of_correlation(M=M, NA=NA, dp=dp, n=n, lmbda=lmbda, eps=eps)\n\n self.depth_of_correlation = depth_of_correlation\n\n def calculate_pixel_to_micron_scaling(self):\n if self.microgrid is None:\n raise ValueError(\"Need objective.microgrid property in order to calculate scaling factor\")\n # script to calculate scaling factor from grid\n # would go here\n\n @property\n def NA(self):\n return self.numerical_aperture\n\n @property\n def M(self):\n return self.magnification\n\nclass microgrid(object):\n\n def __init__(self, gridPath=None, center_to_center_spacing=None, feature_width=None, grid_type='grid', show_grid=False):\n \"\"\"\n this class holds images for the microgrid and performs pixel to micron scaling calculations\n \"\"\"\n if gridPath is not None:\n self.gridPath = gridPath\n self.spacing = center_to_center_spacing\n self.width = feature_width\n self.grid_type = grid_type\n\n # find files in directory\n file_list = glob.glob(join(self.gridPath, 'grid*.tif'))\n\n if len(file_list) < 1:\n raise ValueError(\"No grid*.tif files found in {}\".format(self.gridPath))\n\n img_grid = np.zeros(shape=(512,512))\n for f in file_list:\n img = io.imread(f, plugin='tifffile')\n if len(np.shape(img)) > 2:\n img = np.mean(img, axis=0)\n img_grid += img\n\n img_grid = img_grid / len(file_list)\n\n self.img_grid = img_grid\n\n if show_grid is True:\n fig, ax = plt.subplots()\n ax.imshow(img_grid, cmap='gray')\n\n ax.set_xlabel('pixels')\n ax.set_ylabel('pixels')\n plt.title('grid: 10 um Lines; 50 um Spacing')\n plt.show()\n\n\nclass fluorescent_particles(object):\n\n def __init__(self, name=None, materials=None,diameter=None,fluorescence_spectra=None, concentration=None,\n electrophoretic_mobility=None, zeta=None):\n \"\"\"\n the details of the fluroescent particles used\n :param materials:\n :param diameter:\n :param fluorescence_spectra:\n :param concentration:\n :param electrophoretic_mobility:\n :param zeta:\n \"\"\"\n\n self.name = name\n self.materials=materials\n self.concentration=concentration\n self.electrophoretic_mobility=electrophoretic_mobility\n self.zeta=zeta\n self.diameter=diameter\n if diameter:\n k_b = 1.3806e-23\n T=298\n mu=0.001\n self.diffusivity = k_b*T/(6*np.pi*mu*diameter/2)\n\n self.fluorescence_spectra=fluorescence_spectra\n\n\nclass reservoir(object):\n\n def __init__(self, diameter, height, height_of_reservoir=None, material=None):\n \"\"\"\n describes the micrscope setup\n :param type:\n :param objective:\n \"\"\"\n g = 9.81 # m/s**2\n\n self.material = material\n self.diameter = 
diameter\n self.height = height\n self.volume = np.pi*self.diameter**2/4\n self.height_of_reservoir = height_of_reservoir\n if material and height_of_reservoir:\n self.hydrostatic_pressure = material.density*g*self.height_of_reservoir\n\nclass fluid_handling_system(object):\n\n def __init__(self, fluid_reservoir=None, all_tubing=None, onchip_reservoir=None):\n \"\"\"\n describes the fluid handling system\n \"\"\"\n self.fluid_reservoir=fluid_reservoir\n self.all_tubing = all_tubing\n self.onchip_reservoir = onchip_reservoir\n\nclass tubing(object):\n\n def __init__(self, inner_diameter=None, length=None, material=None):\n \"\"\"\n describes each segment of tubing\n\n \"\"\"\n self.inner_diameter = inner_diameter\n self.length = length\n self.material = material\n\nclass optical_element(object):\n\n def __init__(self, passing_wavelengths=None, reflectivity=None):\n \"\"\"\n this class describes the optical characteristics of any material or element\n :param wavelength_bandpass:\n \"\"\"\n self.passing_wavelengths=passing_wavelengths\n self.reflectivity=reflectivity\n\nclass measurable_quantity(object):\n\n def __init__(self, reference_value=None, measured_value=None):\n \"\"\"\n what value was measured and when\n \"\"\"\n self.reference_value = reference_value\n self.measured_value = measured_value\n\nclass measurement(object):\n\n def __init__(self, value=None, date=None):\n \"\"\"\n Object for storing measurements\n :param value:\n :param date:\n \"\"\"\n self.value = value\n self.date = date\n\nclass electrode_configuration(object):\n\n def __init__(self, material=None, length=None, entrance_length=None):\n \"\"\"\n Object for holding electrode configuration details\n :param material:\n :param length:\n :param entrance_length:\n \"\"\"\n self.material = material\n self.length = length\n self.entrance_length = entrance_length\n\nclass material_solid(object):\n\n def __init__(self, name=None, zeta=None, concentration=None, index_of_refraction=None, transparency=None, fluorescence_spectra=None,\n permittivity=None, conductivity=None, thickness=None, youngs_modulus=None, poissons_ratio=None,\n density=None, dielectric_strength=None, reaction_site_density=None, Ka=None, Kb=None, width=None, length=None):\n \"\"\"\n everything about a material\n :param transparency:\n :param fluorescence_spectra:\n :param zeta:\n \"\"\"\n # identity\n self.name = name\n\n # geometry\n self.length = length\n self.width = width\n self.thickness = thickness\n\n # mechanical\n self.density = density\n self.concentration = concentration # For a solid, this is % by volume.\n self.youngs_modulus = youngs_modulus\n self.poissons_ratio = poissons_ratio\n\n # optical\n self.index_of_refraction = index_of_refraction\n self.fluorescence_spectra = fluorescence_spectra\n self.transparency = transparency\n if self.transparency:\n self.reflectivity = 1 / self.transparency\n\n # electrochemical\n self.conductivity = conductivity\n if permittivity:\n self.permittivity = permittivity\n self.zeta = zeta\n self.dielectric_strength = dielectric_strength\n if reaction_site_density:\n self.reaction_site_density = reaction_site_density*1e18 # (#/nm2) surface density of reaction sites: accepts nm2 and converts to m2 (see Squires)\n self.Ka = Ka # reaction equilibrium constant - upper bound\n self.Kb = Kb # reaction equilibrium constant - lower bound\n\nclass material_liquid(object):\n\n def __init__(self, name=None, species=None, concentration=None, conductivity=None, pH=None, density=None, viscosity=None,\n permittivity=None, 
temperature=None, valence=1.0):\n \"\"\"\n everything about a liquid\n :param species:\n :param concentration:\n :param conductivity:\n :param pH:\n \"\"\"\n # identity\n self.name = name\n\n # electro/chemical\n self.species = species\n self.concentration = concentration # (mmol) = (mmol/L) = (mol/m3)\n self.conductivity = conductivity\n if permittivity:\n self.permittivity = permittivity\n if pH:\n self.pH = pH\n self.c_H = 10**-pH * 1e3 # (mmol) = (mmol/L) = (mol/m3); (concentration of Hydrogen ions (H+)\n self.valence = valence\n\n # mechanical\n self.density = density\n self.viscosity = viscosity\n self.temperature = temperature\n self.diffusivity = 2e-9 # (m^2/s) Diffusivity of KCl in DI water [Soni]",
"step-ids": [
37,
41,
45,
48,
50
]
}
|
[
37,
41,
45,
48,
50
] |
from django.conf.urls import patterns, url
from riskDashboard2 import views
urlpatterns = patterns('',
#url(r'getdata', views.vulnData, name='getdata'),
url(r'appmanagement', views.appmanagement, name='appmanagement'),
url(r'^.*', views.index, name='index'),
)
|
normal
|
{
"blob_id": "3372d98ff91d90558a87293d4032820b1662d60b",
"index": 298,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = patterns('', url('appmanagement', views.appmanagement, name=\n 'appmanagement'), url('^.*', views.index, name='index'))\n",
"step-3": "from django.conf.urls import patterns, url\nfrom riskDashboard2 import views\nurlpatterns = patterns('', url('appmanagement', views.appmanagement, name=\n 'appmanagement'), url('^.*', views.index, name='index'))\n",
"step-4": "from django.conf.urls import patterns, url\n\nfrom riskDashboard2 import views\n\nurlpatterns = patterns('',\n #url(r'getdata', views.vulnData, name='getdata'),\n url(r'appmanagement', views.appmanagement, name='appmanagement'),\n url(r'^.*', views.index, name='index'),\n)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def savitzky_golay(y, window_size, order, deriv=0, rate=1):
order_range = range(order + 1)
half_window = (window_size - 1) // 2
b = np.mat([[(k ** i) for i in order_range] for k in range(-half_window,
half_window + 1)])
m = np.linalg.pinv(b).A[deriv] * rate ** deriv * factorial(deriv)
firstvals = y[0] - np.abs(y[1:half_window + 1][::-1] - y[0])
lastvals = y[-1] + np.abs(y[-half_window - 1:-1][::-1] - y[-1])
y = np.concatenate((firstvals, y, lastvals))
return np.convolve(m[::-1], y, mode='valid')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def savitzky_golay(y, window_size, order, deriv=0, rate=1):
order_range = range(order + 1)
half_window = (window_size - 1) // 2
b = np.mat([[(k ** i) for i in order_range] for k in range(-half_window,
half_window + 1)])
m = np.linalg.pinv(b).A[deriv] * rate ** deriv * factorial(deriv)
firstvals = y[0] - np.abs(y[1:half_window + 1][::-1] - y[0])
lastvals = y[-1] + np.abs(y[-half_window - 1:-1][::-1] - y[-1])
y = np.concatenate((firstvals, y, lastvals))
return np.convolve(m[::-1], y, mode='valid')
with open(
'3v1_/learning_curves/model-prey-s/seed_pgddpg_0.8/pre_trained_prey_20200910204032/model-prey-s_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data0 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data0))
with open(
'3v1_/learning_curves/model-prey-01/seed_pgddpg_0.8/pre_trained_prey_20200910200405/model-prey-01_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data1 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data1))
with open(
'3v1_/learning_curves/model-prey-02/seed_pgddpg_0.8/pre_trained_prey_20200910200419/model-prey-02_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data2 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data2))
with open(
'3v1_/learning_curves/model-prey-03/seed_pgddpg_0.8/pre_trained_prey_20200910200427/model-prey-03_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data3 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data3))
with open(
'3v1_/learning_curves/model-prey-04/seed_pgddpg_0.8/pre_trained_prey_20200910200435/model-prey-04_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data4 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data4))
with open(
'3v1_/learning_curves/model-prey-23/seed_pgddpg_0.8/pre_trained_prey_20200910200115/model-prey-23_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data5 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data5))
with open(
'3v1_/learning_curves/model-prey-06/seed_pgddpg_0.8/pre_trained_prey_20200910200446/model-prey-06_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data6 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data6))
with open(
'3v1_/learning_curves/model-prey-07/seed_pgddpg_0.8/pre_trained_prey_20200910200455/model-prey-07_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data7 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data7))
with open(
'3v1_/learning_curves/model-prey-08/seed_pgddpg_0.8/pre_trained_prey_20200910200504/model-prey-08_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data8 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data8))
with open(
'3v1_/learning_curves/model-prey-09/seed_pgddpg_0.8/pre_trained_prey_20200910200512/model-prey-09_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data9 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data9))
with open(
'3v1_/learning_curves/model-prey-s/seed_ddpg/20200912103349/model-prey-s_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data0 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data0))
with open(
'3v1_/learning_curves/model-prey-01/seed_ddpg/20200912103401/model-prey-01_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data1 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data1))
with open(
'3v1_/learning_curves/model-prey-02/seed_ddpg/20200912103408/model-prey-02_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data2 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data2))
with open(
'3v1_/learning_curves/model-prey-03/seed_ddpg/20200912103416/model-prey-03_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data3 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data3))
with open(
'3v1_/learning_curves/model-prey-04/seed_ddpg/20200912103421/model-prey-04_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data4 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data4))
with open(
'3v1_/learning_curves/model-prey-23/seed_ddpg/20200912103327/model-prey-23_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data5 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data5))
with open(
'3v1_/learning_curves/model-prey-06/seed_ddpg/20200912103427/model-prey-06_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data6 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data6))
with open(
'3v1_/learning_curves/model-prey-07/seed_ddpg/20200912103433/model-prey-07_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data7 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data7))
with open(
'3v1_/learning_curves/model-prey-08/seed_ddpg/20200912103440/model-prey-08_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data8 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data8))
with open(
'3v1_/learning_curves/model-prey-09/seed_ddpg/20200912103446/model-prey-09_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data9 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data9))
with open(
'3v1_/learning_curves/model-prey-s/seed_maddpg/20200910205027/model-prey-s_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data0 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data0))
with open(
'3v1_/learning_curves/model-prey-01/seed_maddpg/20200910205033/model-prey-01_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data1 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data1))
with open(
'3v1_/learning_curves/model-prey-02/seed_maddpg/20200910205040/model-prey-02_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data2 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data2))
with open(
'3v1_/learning_curves/model-prey-03/seed_maddpg/20200910205046/model-prey-03_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data3 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data3))
with open(
'3v1_/learning_curves/model-prey-04/seed_maddpg/20200910205052/model-prey-04_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data4 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data4))
with open(
'3v1_/learning_curves/model-prey-23/seed_maddpg/20200910205019/model-prey-23_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data5 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data5))
with open(
'3v1_/learning_curves/model-prey-06/seed_maddpg/20200910205104/model-prey-06_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data6 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data6))
with open(
'3v1_/learning_curves/model-prey-07/seed_maddpg/20200910205135/model-prey-07_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data7 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data7))
with open(
'3v1_/learning_curves/model-prey-08/seed_maddpg/20200910205147/model-prey-08_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data8 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data8))
with open(
'3v1_/learning_curves/model-prey-09/seed_maddpg/20200910205155/model-prey-09_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data9 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data9))
<|reserved_special_token_0|>
print(end)
<|reserved_special_token_0|>
plt.figure()
plt.plot(zz, pgddpg_vs_prey00, label='pgddpg_vs_prey00', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, ddpg_vs_prey00, label='ddpg_vs_prey00', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey00, label='maddpg_vs_prey00', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey01, label='pgddpg_vs_prey01', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey02, label='pgddpg_vs_prey02', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey03, label='pgddpg_vs_prey03', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey04, label='pgddpg_vs_prey04', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey05, label='pgddpg_vs_prey05', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey06, label='pgddpg_vs_prey06', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey07, label='pgddpg_vs_prey07', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey08, label='pgddpg_vs_prey08', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey09, label='pgddpg_vs_prey09', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, ddpg_vs_prey01, label='ddpg_vs_prey01', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey02, label='ddpg_vs_prey02', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey03, label='ddpg_vs_prey03', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey04, label='ddpg_vs_prey04', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey05, label='ddpg_vs_prey05', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey06, label='ddpg_vs_prey06', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey07, label='ddpg_vs_prey07', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey08, label='ddpg_vs_prey08', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey09, label='ddpg_vs_prey09', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey01, label='maddpg_vs_prey01', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey02, label='maddpg_vs_prey02', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey03, label='maddpg_vs_prey03', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey04, label='maddpg_vs_prey04', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey05, label='maddpg_vs_prey05', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey06, label='maddpg_vs_prey06', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey07, label='maddpg_vs_prey07', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey08, label='maddpg_vs_prey08', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey09, label='maddpg_vs_prey09', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.tick_params(labelsize=23)
<|reserved_special_token_0|>
plt.title('Different Seeds', font2)
plt.xlabel('Episodes', font2)
plt.ylabel('avg_success_rate', font2)
plt.legend(labels=['pgddpg($\\beta=0.8$) vs preys',
'ddpg($\\alpha=1$) vs preys', 'maddpg($\\alpha=5$) vs preys'])
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def savitzky_golay(y, window_size, order, deriv=0, rate=1):
order_range = range(order + 1)
half_window = (window_size - 1) // 2
b = np.mat([[(k ** i) for i in order_range] for k in range(-half_window,
half_window + 1)])
m = np.linalg.pinv(b).A[deriv] * rate ** deriv * factorial(deriv)
firstvals = y[0] - np.abs(y[1:half_window + 1][::-1] - y[0])
lastvals = y[-1] + np.abs(y[-half_window - 1:-1][::-1] - y[-1])
y = np.concatenate((firstvals, y, lastvals))
return np.convolve(m[::-1], y, mode='valid')
with open(
'3v1_/learning_curves/model-prey-s/seed_pgddpg_0.8/pre_trained_prey_20200910204032/model-prey-s_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data0 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data0))
with open(
'3v1_/learning_curves/model-prey-01/seed_pgddpg_0.8/pre_trained_prey_20200910200405/model-prey-01_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data1 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data1))
with open(
'3v1_/learning_curves/model-prey-02/seed_pgddpg_0.8/pre_trained_prey_20200910200419/model-prey-02_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data2 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data2))
with open(
'3v1_/learning_curves/model-prey-03/seed_pgddpg_0.8/pre_trained_prey_20200910200427/model-prey-03_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data3 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data3))
with open(
'3v1_/learning_curves/model-prey-04/seed_pgddpg_0.8/pre_trained_prey_20200910200435/model-prey-04_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data4 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data4))
with open(
'3v1_/learning_curves/model-prey-23/seed_pgddpg_0.8/pre_trained_prey_20200910200115/model-prey-23_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data5 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data5))
with open(
'3v1_/learning_curves/model-prey-06/seed_pgddpg_0.8/pre_trained_prey_20200910200446/model-prey-06_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data6 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data6))
with open(
'3v1_/learning_curves/model-prey-07/seed_pgddpg_0.8/pre_trained_prey_20200910200455/model-prey-07_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data7 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data7))
with open(
'3v1_/learning_curves/model-prey-08/seed_pgddpg_0.8/pre_trained_prey_20200910200504/model-prey-08_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data8 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data8))
with open(
'3v1_/learning_curves/model-prey-09/seed_pgddpg_0.8/pre_trained_prey_20200910200512/model-prey-09_sucess_record.pkl'
, 'rb') as fo:
pgddpg_dict_data9 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data9))
with open(
'3v1_/learning_curves/model-prey-s/seed_ddpg/20200912103349/model-prey-s_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data0 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data0))
with open(
'3v1_/learning_curves/model-prey-01/seed_ddpg/20200912103401/model-prey-01_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data1 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data1))
with open(
'3v1_/learning_curves/model-prey-02/seed_ddpg/20200912103408/model-prey-02_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data2 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data2))
with open(
'3v1_/learning_curves/model-prey-03/seed_ddpg/20200912103416/model-prey-03_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data3 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data3))
with open(
'3v1_/learning_curves/model-prey-04/seed_ddpg/20200912103421/model-prey-04_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data4 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data4))
with open(
'3v1_/learning_curves/model-prey-23/seed_ddpg/20200912103327/model-prey-23_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data5 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data5))
with open(
'3v1_/learning_curves/model-prey-06/seed_ddpg/20200912103427/model-prey-06_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data6 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data6))
with open(
'3v1_/learning_curves/model-prey-07/seed_ddpg/20200912103433/model-prey-07_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data7 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data7))
with open(
'3v1_/learning_curves/model-prey-08/seed_ddpg/20200912103440/model-prey-08_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data8 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data8))
with open(
'3v1_/learning_curves/model-prey-09/seed_ddpg/20200912103446/model-prey-09_sucess_record.pkl'
, 'rb') as fo:
ddpg_dict_data9 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data9))
with open(
'3v1_/learning_curves/model-prey-s/seed_maddpg/20200910205027/model-prey-s_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data0 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data0))
with open(
'3v1_/learning_curves/model-prey-01/seed_maddpg/20200910205033/model-prey-01_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data1 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data1))
with open(
'3v1_/learning_curves/model-prey-02/seed_maddpg/20200910205040/model-prey-02_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data2 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data2))
with open(
'3v1_/learning_curves/model-prey-03/seed_maddpg/20200910205046/model-prey-03_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data3 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data3))
with open(
'3v1_/learning_curves/model-prey-04/seed_maddpg/20200910205052/model-prey-04_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data4 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data4))
with open(
'3v1_/learning_curves/model-prey-23/seed_maddpg/20200910205019/model-prey-23_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data5 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data5))
with open(
'3v1_/learning_curves/model-prey-06/seed_maddpg/20200910205104/model-prey-06_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data6 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data6))
with open(
'3v1_/learning_curves/model-prey-07/seed_maddpg/20200910205135/model-prey-07_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data7 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data7))
with open(
'3v1_/learning_curves/model-prey-08/seed_maddpg/20200910205147/model-prey-08_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data8 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data8))
with open(
'3v1_/learning_curves/model-prey-09/seed_maddpg/20200910205155/model-prey-09_sucess_record.pkl'
, 'rb') as fo:
maddpg_dict_data9 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data9))
smooth_neighbor = 5
start = 0
end = 400
ddpg_vs_prey00 = savitzky_golay(np.array(ddpg_dict_data0[start:end]),
smooth_neighbor, 3)
ddpg_vs_prey01 = savitzky_golay(np.array(ddpg_dict_data1[start:end]),
smooth_neighbor, 3)
ddpg_vs_prey02 = savitzky_golay(np.array(ddpg_dict_data2[start:end]),
smooth_neighbor, 3)
ddpg_vs_prey03 = savitzky_golay(np.array(ddpg_dict_data3[start:end]),
smooth_neighbor, 3)
ddpg_vs_prey04 = savitzky_golay(np.array(ddpg_dict_data4[start:end]),
smooth_neighbor, 3)
ddpg_vs_prey05 = savitzky_golay(np.array(ddpg_dict_data5[start:end]),
smooth_neighbor, 3)
ddpg_vs_prey06 = savitzky_golay(np.array(ddpg_dict_data6[start:end]),
smooth_neighbor, 3)
ddpg_vs_prey07 = savitzky_golay(np.array(ddpg_dict_data7[start:end]),
smooth_neighbor, 3)
ddpg_vs_prey08 = savitzky_golay(np.array(ddpg_dict_data8[start:end]),
smooth_neighbor, 3)
ddpg_vs_prey09 = savitzky_golay(np.array(ddpg_dict_data9[start:end]),
smooth_neighbor, 3)
maddpg_vs_prey00 = savitzky_golay(np.array(maddpg_dict_data0[start:end]),
smooth_neighbor, 3)
maddpg_vs_prey01 = savitzky_golay(np.array(maddpg_dict_data1[start:end]),
smooth_neighbor, 3)
maddpg_vs_prey02 = savitzky_golay(np.array(maddpg_dict_data2[start:end]),
smooth_neighbor, 3)
maddpg_vs_prey03 = savitzky_golay(np.array(maddpg_dict_data3[start:end]),
smooth_neighbor, 3)
maddpg_vs_prey04 = savitzky_golay(np.array(maddpg_dict_data4[start:end]),
smooth_neighbor, 3)
maddpg_vs_prey05 = savitzky_golay(np.array(maddpg_dict_data5[start:end]),
smooth_neighbor, 3)
maddpg_vs_prey06 = savitzky_golay(np.array(maddpg_dict_data6[start:end]),
smooth_neighbor, 3)
maddpg_vs_prey07 = savitzky_golay(np.array(maddpg_dict_data7[start:end]),
smooth_neighbor, 3)
maddpg_vs_prey08 = savitzky_golay(np.array(maddpg_dict_data8[start:end]),
smooth_neighbor, 3)
maddpg_vs_prey09 = savitzky_golay(np.array(maddpg_dict_data9[start:end]),
smooth_neighbor, 3)
pgddpg_vs_prey00 = savitzky_golay(np.array(pgddpg_dict_data0[start:end]),
smooth_neighbor, 3)
pgddpg_vs_prey01 = savitzky_golay(np.array(pgddpg_dict_data1[start:end]),
smooth_neighbor, 3)
pgddpg_vs_prey02 = savitzky_golay(np.array(pgddpg_dict_data2[start:end]),
smooth_neighbor, 3)
pgddpg_vs_prey03 = savitzky_golay(np.array(pgddpg_dict_data3[start:end]),
smooth_neighbor, 3)
pgddpg_vs_prey04 = savitzky_golay(np.array(pgddpg_dict_data4[start:end]),
smooth_neighbor, 3)
pgddpg_vs_prey05 = savitzky_golay(np.array(pgddpg_dict_data5[start:end]),
smooth_neighbor, 3)
pgddpg_vs_prey06 = savitzky_golay(np.array(pgddpg_dict_data6[start:end]),
smooth_neighbor, 3)
pgddpg_vs_prey07 = savitzky_golay(np.array(pgddpg_dict_data7[start:end]),
smooth_neighbor, 3)
pgddpg_vs_prey08 = savitzky_golay(np.array(pgddpg_dict_data8[start:end]),
smooth_neighbor, 3)
pgddpg_vs_prey09 = savitzky_golay(np.array(pgddpg_dict_data9[start:end]),
smooth_neighbor, 3)
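# Editor's sketch (not part of the original script): the ten smoothed per-seed
# curves of each algorithm could also be aggregated into a mean curve with a
# spread, e.g. for the pgddpg runs; the names pgddpg_stack / pgddpg_mean /
# pgddpg_std are illustrative only and are not used by the plotting code below.
pgddpg_stack = np.vstack([pgddpg_vs_prey00, pgddpg_vs_prey01, pgddpg_vs_prey02,
    pgddpg_vs_prey03, pgddpg_vs_prey04, pgddpg_vs_prey05, pgddpg_vs_prey06,
    pgddpg_vs_prey07, pgddpg_vs_prey08, pgddpg_vs_prey09])
pgddpg_mean = pgddpg_stack.mean(axis=0)
pgddpg_std = pgddpg_stack.std(axis=0)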
print(end)
zz = range(0, end - start)
zz = np.multiply(100, zz)
plt.figure()
plt.plot(zz, pgddpg_vs_prey00, label='pgddpg_vs_prey00', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, ddpg_vs_prey00, label='ddpg_vs_prey00', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey00, label='maddpg_vs_prey00', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey01, label='pgddpg_vs_prey01', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey02, label='pgddpg_vs_prey02', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey03, label='pgddpg_vs_prey03', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey04, label='pgddpg_vs_prey04', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey05, label='pgddpg_vs_prey05', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey06, label='pgddpg_vs_prey06', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey07, label='pgddpg_vs_prey07', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey08, label='pgddpg_vs_prey08', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, pgddpg_vs_prey09, label='pgddpg_vs_prey09', linewidth=1,
linestyle='dashed', color='r', marker='o', markerfacecolor='red',
markersize=2)
plt.plot(zz, ddpg_vs_prey01, label='ddpg_vs_prey01', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey02, label='ddpg_vs_prey02', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey03, label='ddpg_vs_prey03', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey04, label='ddpg_vs_prey04', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey05, label='ddpg_vs_prey05', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey06, label='ddpg_vs_prey06', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey07, label='ddpg_vs_prey07', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey08, label='ddpg_vs_prey08', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey09, label='ddpg_vs_prey09', linewidth=1, linestyle
='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey01, label='maddpg_vs_prey01', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey02, label='maddpg_vs_prey02', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey03, label='maddpg_vs_prey03', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey04, label='maddpg_vs_prey04', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey05, label='maddpg_vs_prey05', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey06, label='maddpg_vs_prey06', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey07, label='maddpg_vs_prey07', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey08, label='maddpg_vs_prey08', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.plot(zz, maddpg_vs_prey09, label='maddpg_vs_prey09', linewidth=1,
linestyle='dashed', color='g', marker='.', markerfacecolor='red',
markersize=2)
plt.tick_params(labelsize=23)
font2 = {'family': 'Times New Roman', 'weight': 'normal', 'size': 30}
plt.title('Different Seeds', font2)
plt.xlabel('Episodes', font2)
plt.ylabel('avg_success_rate', font2)
plt.legend(labels=['pgddpg($\\beta=0.8$) vs preys',
'ddpg($\\alpha=1$) vs preys', 'maddpg($\\alpha=5$) vs preys'])
plt.show()
<|reserved_special_token_1|>
import pickle
import matplotlib.pyplot as plt
import numpy as np
from math import factorial
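# Compares learning curves across random seeds in the 3v1 task: the per-seed
# success-rate records of pgddpg (beta=0.8), ddpg and maddpg against pre-trained
# preys are loaded, smoothed with a Savitzky-Golay filter, and drawn in one figure.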
def savitzky_golay(y, window_size, order, deriv=0, rate=1):
    """Smooth a 1-D signal y with a Savitzky-Golay filter.

    A polynomial of the given order is fitted over a sliding window of
    window_size points (an odd window is assumed); deriv selects which
    derivative of the fit to return (0 = the smoothed signal itself).
    """
    order_range = range(order + 1)
    half_window = (window_size - 1) // 2
    # Design matrix of the local fit; its pseudo-inverse yields the filter coefficients.
    b = np.mat([[k ** i for i in order_range] for k in range(-half_window, half_window + 1)])
    m = np.linalg.pinv(b).A[deriv] * rate ** deriv * factorial(deriv)
    # Mirror the signal at both ends to soften edge artefacts, then convolve.
    firstvals = y[0] - np.abs(y[1:half_window + 1][::-1] - y[0])
    lastvals = y[-1] + np.abs(y[-half_window - 1:-1][::-1] - y[-1])
    y = np.concatenate((firstvals, y, lastvals))
    return np.convolve(m[::-1], y, mode='valid')
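# Illustrative call (hypothetical values, not part of the analysis below): with a
# 5-point window and a cubic fit, each point is replaced by the local polynomial value,
#   e.g.  smoothed = savitzky_golay(np.array([0., 1., 0., 1., 0., 1., 0.]), 5, 3)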
#pgddpg
with open("3v1_/learning_curves/model-prey-s/seed_pgddpg_0.8/pre_trained_prey_20200910204032/model-prey-s_sucess_record.pkl", 'rb') as fo:
pgddpg_dict_data0 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data0))
with open("3v1_/learning_curves/model-prey-01/seed_pgddpg_0.8/pre_trained_prey_20200910200405/model-prey-01_sucess_record.pkl", 'rb') as fo:
pgddpg_dict_data1 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data1))
with open("3v1_/learning_curves/model-prey-02/seed_pgddpg_0.8/pre_trained_prey_20200910200419/model-prey-02_sucess_record.pkl", 'rb') as fo:
pgddpg_dict_data2 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data2))
with open("3v1_/learning_curves/model-prey-03/seed_pgddpg_0.8/pre_trained_prey_20200910200427/model-prey-03_sucess_record.pkl", 'rb') as fo:
pgddpg_dict_data3 = pickle.load(fo, encoding='bytes')
    print(len(pgddpg_dict_data3))
with open("3v1_/learning_curves/model-prey-04/seed_pgddpg_0.8/pre_trained_prey_20200910200435/model-prey-04_sucess_record.pkl", 'rb') as fo:
pgddpg_dict_data4 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data4))
with open("3v1_/learning_curves/model-prey-23/seed_pgddpg_0.8/pre_trained_prey_20200910200115/model-prey-23_sucess_record.pkl", 'rb') as fo:
pgddpg_dict_data5 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data5))
with open("3v1_/learning_curves/model-prey-06/seed_pgddpg_0.8/pre_trained_prey_20200910200446/model-prey-06_sucess_record.pkl", 'rb') as fo:
pgddpg_dict_data6 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data6))
with open("3v1_/learning_curves/model-prey-07/seed_pgddpg_0.8/pre_trained_prey_20200910200455/model-prey-07_sucess_record.pkl", 'rb') as fo:
pgddpg_dict_data7 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data7))
with open("3v1_/learning_curves/model-prey-08/seed_pgddpg_0.8/pre_trained_prey_20200910200504/model-prey-08_sucess_record.pkl", 'rb') as fo:
pgddpg_dict_data8 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data8))
with open("3v1_/learning_curves/model-prey-09/seed_pgddpg_0.8/pre_trained_prey_20200910200512/model-prey-09_sucess_record.pkl", 'rb') as fo:
pgddpg_dict_data9 = pickle.load(fo, encoding='bytes')
print(len(pgddpg_dict_data9))
#ddpg
with open("3v1_/learning_curves/model-prey-s/seed_ddpg/20200912103349/model-prey-s_sucess_record.pkl", 'rb') as fo:
ddpg_dict_data0 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data0))
with open("3v1_/learning_curves/model-prey-01/seed_ddpg/20200912103401/model-prey-01_sucess_record.pkl", 'rb') as fo:
ddpg_dict_data1 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data1))
with open("3v1_/learning_curves/model-prey-02/seed_ddpg/20200912103408/model-prey-02_sucess_record.pkl", 'rb') as fo:
ddpg_dict_data2 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data2))
with open("3v1_/learning_curves/model-prey-03/seed_ddpg/20200912103416/model-prey-03_sucess_record.pkl", 'rb') as fo:
ddpg_dict_data3 = pickle.load(fo, encoding='bytes')
    print(len(ddpg_dict_data3))
with open("3v1_/learning_curves/model-prey-04/seed_ddpg/20200912103421/model-prey-04_sucess_record.pkl", 'rb') as fo:
ddpg_dict_data4 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data4))
with open("3v1_/learning_curves/model-prey-23/seed_ddpg/20200912103327/model-prey-23_sucess_record.pkl", 'rb') as fo:
ddpg_dict_data5 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data5))
with open("3v1_/learning_curves/model-prey-06/seed_ddpg/20200912103427/model-prey-06_sucess_record.pkl", 'rb') as fo:
ddpg_dict_data6 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data6))
with open("3v1_/learning_curves/model-prey-07/seed_ddpg/20200912103433/model-prey-07_sucess_record.pkl", 'rb') as fo:
ddpg_dict_data7 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data7))
with open("3v1_/learning_curves/model-prey-08/seed_ddpg/20200912103440/model-prey-08_sucess_record.pkl", 'rb') as fo:
ddpg_dict_data8 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data8))
with open("3v1_/learning_curves/model-prey-09/seed_ddpg/20200912103446/model-prey-09_sucess_record.pkl", 'rb') as fo:
ddpg_dict_data9 = pickle.load(fo, encoding='bytes')
print(len(ddpg_dict_data9))
#maddpg
with open("3v1_/learning_curves/model-prey-s/seed_maddpg/20200910205027/model-prey-s_sucess_record.pkl", 'rb') as fo:
maddpg_dict_data0 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data0))
with open("3v1_/learning_curves/model-prey-01/seed_maddpg/20200910205033/model-prey-01_sucess_record.pkl", 'rb') as fo:
maddpg_dict_data1 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data1))
with open("3v1_/learning_curves/model-prey-02/seed_maddpg/20200910205040/model-prey-02_sucess_record.pkl", 'rb') as fo:
maddpg_dict_data2 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data2))
with open("3v1_/learning_curves/model-prey-03/seed_maddpg/20200910205046/model-prey-03_sucess_record.pkl", 'rb') as fo:
maddpg_dict_data3 = pickle.load(fo, encoding='bytes')
    print(len(maddpg_dict_data3))
with open("3v1_/learning_curves/model-prey-04/seed_maddpg/20200910205052/model-prey-04_sucess_record.pkl", 'rb') as fo:
maddpg_dict_data4 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data4))
with open("3v1_/learning_curves/model-prey-23/seed_maddpg/20200910205019/model-prey-23_sucess_record.pkl", 'rb') as fo:
maddpg_dict_data5 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data5))
with open("3v1_/learning_curves/model-prey-06/seed_maddpg/20200910205104/model-prey-06_sucess_record.pkl", 'rb') as fo:
maddpg_dict_data6 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data6))
with open("3v1_/learning_curves/model-prey-07/seed_maddpg/20200910205135/model-prey-07_sucess_record.pkl", 'rb') as fo:
maddpg_dict_data7 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data7))
with open("3v1_/learning_curves/model-prey-08/seed_maddpg/20200910205147/model-prey-08_sucess_record.pkl", 'rb') as fo:
maddpg_dict_data8 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data8))
with open("3v1_/learning_curves/model-prey-09/seed_maddpg/20200910205155/model-prey-09_sucess_record.pkl", 'rb') as fo:
maddpg_dict_data9 = pickle.load(fo, encoding='bytes')
print(len(maddpg_dict_data9))
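# Note: the thirty near-identical with-open blocks above could be collapsed through a
# small helper; a hypothetical sketch (each seed still needs its own hard-coded path,
# since the run timestamps differ):
#
# def load_success_record(path):
#     with open(path, 'rb') as f:
#         return pickle.load(f, encoding='bytes')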
smooth_neighbor = 5  # Savitzky-Golay window size used for every curve below
start = 0
# end=min(len(pgddpg_dict_data0),len(pgddpg_dict_data1),len(pgddpg_dict_data2),len(pgddpg_dict_data3),len(pgddpg_dict_data4),len(pgddpg_dict_data5),len(pgddpg_dict_data6),len(pgddpg_dict_data7),len(pgddpg_dict_data8),len(pgddpg_dict_data9),)
end = 400  # fixed cut-off so every run is compared over the same number of points
ddpg_vs_prey00 = savitzky_golay(np.array(ddpg_dict_data0[start:end]), smooth_neighbor, 3)
ddpg_vs_prey01 = savitzky_golay(np.array(ddpg_dict_data1[start:end]), smooth_neighbor, 3)
ddpg_vs_prey02 = savitzky_golay(np.array(ddpg_dict_data2[start:end]), smooth_neighbor, 3)
ddpg_vs_prey03 = savitzky_golay(np.array(ddpg_dict_data3[start:end]), smooth_neighbor, 3)
ddpg_vs_prey04 = savitzky_golay(np.array(ddpg_dict_data4[start:end]), smooth_neighbor, 3)
ddpg_vs_prey05 = savitzky_golay(np.array(ddpg_dict_data5[start:end]), smooth_neighbor, 3)
ddpg_vs_prey06 = savitzky_golay(np.array(ddpg_dict_data6[start:end]), smooth_neighbor, 3)
ddpg_vs_prey07 = savitzky_golay(np.array(ddpg_dict_data7[start:end]), smooth_neighbor, 3)
ddpg_vs_prey08 = savitzky_golay(np.array(ddpg_dict_data8[start:end]), smooth_neighbor, 3)
ddpg_vs_prey09 = savitzky_golay(np.array(ddpg_dict_data9[start:end]), smooth_neighbor, 3)
maddpg_vs_prey00 = savitzky_golay(np.array(maddpg_dict_data0[start:end]), smooth_neighbor, 3)
maddpg_vs_prey01 = savitzky_golay(np.array(maddpg_dict_data1[start:end]), smooth_neighbor, 3)
maddpg_vs_prey02 = savitzky_golay(np.array(maddpg_dict_data2[start:end]), smooth_neighbor, 3)
maddpg_vs_prey03 = savitzky_golay(np.array(maddpg_dict_data3[start:end]), smooth_neighbor, 3)
maddpg_vs_prey04 = savitzky_golay(np.array(maddpg_dict_data4[start:end]), smooth_neighbor, 3)
maddpg_vs_prey05 = savitzky_golay(np.array(maddpg_dict_data5[start:end]), smooth_neighbor, 3)
maddpg_vs_prey06 = savitzky_golay(np.array(maddpg_dict_data6[start:end]), smooth_neighbor, 3)
maddpg_vs_prey07 = savitzky_golay(np.array(maddpg_dict_data7[start:end]), smooth_neighbor, 3)
maddpg_vs_prey08 = savitzky_golay(np.array(maddpg_dict_data8[start:end]), smooth_neighbor, 3)
maddpg_vs_prey09 = savitzky_golay(np.array(maddpg_dict_data9[start:end]), smooth_neighbor, 3)
pgddpg_vs_prey00 = savitzky_golay(np.array(pgddpg_dict_data0[start:end]), smooth_neighbor, 3)
pgddpg_vs_prey01 = savitzky_golay(np.array(pgddpg_dict_data1[start:end]), smooth_neighbor, 3)
pgddpg_vs_prey02 = savitzky_golay(np.array(pgddpg_dict_data2[start:end]), smooth_neighbor, 3)
pgddpg_vs_prey03 = savitzky_golay(np.array(pgddpg_dict_data3[start:end]), smooth_neighbor, 3)
pgddpg_vs_prey04 = savitzky_golay(np.array(pgddpg_dict_data4[start:end]), smooth_neighbor, 3)
pgddpg_vs_prey05 = savitzky_golay(np.array(pgddpg_dict_data5[start:end]), smooth_neighbor, 3)
pgddpg_vs_prey06 = savitzky_golay(np.array(pgddpg_dict_data6[start:end]), smooth_neighbor, 3)
pgddpg_vs_prey07 = savitzky_golay(np.array(pgddpg_dict_data7[start:end]), smooth_neighbor, 3)
pgddpg_vs_prey08 = savitzky_golay(np.array(pgddpg_dict_data8[start:end]), smooth_neighbor, 3)
pgddpg_vs_prey09 = savitzky_golay(np.array(pgddpg_dict_data9[start:end]), smooth_neighbor, 3)
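# Optional aggregate (not computed here): one mean curve per algorithm could be formed by
# stacking the ten smoothed seeds, e.g.
#   pgddpg_mean = np.mean(np.vstack([pgddpg_vs_prey00, pgddpg_vs_prey01, pgddpg_vs_prey02,
#                                    pgddpg_vs_prey03, pgddpg_vs_prey04, pgddpg_vs_prey05,
#                                    pgddpg_vs_prey06, pgddpg_vs_prey07, pgddpg_vs_prey08,
#                                    pgddpg_vs_prey09]), axis=0)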
print(end)
zz = range(0, end - start)
zz = np.multiply(100, zz)  # x-axis in episodes: recorded points are 100 episodes apart
# ax1 = plt.subplot(2, 1, 1)
plt.figure()
# One curve per algorithm (seed "s") is drawn first so that the three-entry legend at the
# end picks up one handle of each colour: pgddpg (red), ddpg (blue), maddpg (green).
plt.plot(zz, pgddpg_vs_prey00, label='pgddpg_vs_prey00', linewidth=1, linestyle="dashed",  # prey-s
         color='r', marker='o', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey00, label='ddpg_vs_prey00', linewidth=1, linestyle="dashed",
         color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey00, label='maddpg_vs_prey00', linewidth=1, linestyle="dashed",  # prey-s
         color='g', marker='.', markerfacecolor='red', markersize=2)
plt.plot(zz, pgddpg_vs_prey01, label='pgddpg_vs_prey01', linewidth=1, linestyle = "dashed",
color='r', marker='o', markerfacecolor='red', markersize=2)
plt.plot(zz, pgddpg_vs_prey02, label='pgddpg_vs_prey02', linewidth=1, linestyle = "dashed",
color='r', marker='o', markerfacecolor='red', markersize=2)
plt.plot(zz, pgddpg_vs_prey03, label='pgddpg_vs_prey03', linewidth=1, linestyle = "dashed",
color='r', marker='o', markerfacecolor='red', markersize=2)
plt.plot(zz, pgddpg_vs_prey04, label='pgddpg_vs_prey04', linewidth=1, linestyle = "dashed",
color='r', marker='o', markerfacecolor='red', markersize=2)
plt.plot(zz, pgddpg_vs_prey05, label='pgddpg_vs_prey05', linewidth=1, linestyle = "dashed",#prey-23
color='r', marker='o', markerfacecolor='red', markersize=2)
plt.plot(zz, pgddpg_vs_prey06, label='pgddpg_vs_prey06', linewidth=1, linestyle = "dashed",
color='r', marker='o', markerfacecolor='red', markersize=2)
plt.plot(zz, pgddpg_vs_prey07, label='pgddpg_vs_prey07', linewidth=1, linestyle = "dashed",
color='r', marker='o', markerfacecolor='red', markersize=2)
plt.plot(zz, pgddpg_vs_prey08, label='pgddpg_vs_prey08', linewidth=1, linestyle = "dashed",
color='r', marker='o', markerfacecolor='red', markersize=2)
plt.plot(zz, pgddpg_vs_prey09, label='pgddpg_vs_prey09', linewidth=1, linestyle = "dashed",
color='r', marker='o', markerfacecolor='red', markersize=2)
# plt.tick_params(labelsize=23)
# font2 = {'family': 'Times New Roman',
# 'weight': 'normal',
# 'size': 30,
# }
# plt.title('pgddpg',font2)
# plt.xlabel('iteration',font2)
# plt.ylabel('avg_success_rate',font2)
# plt.legend()
# plt.show()
#ddpg
# plt.plot(zz, ddpg_vs_prey00, label='ddpg_vs_prey00', linewidth=1, linestyle = "dashed",
# color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey01, label='ddpg_vs_prey01', linewidth=1, linestyle = "dashed",
color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey02, label='ddpg_vs_prey02', linewidth=1, linestyle = "dashed",
color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey03, label='ddpg_vs_prey03', linewidth=1, linestyle = "dashed",
color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey04, label='ddpg_vs_prey04', linewidth=1, linestyle = "dashed",
color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey05, label='ddpg_vs_prey05', linewidth=1, linestyle = "dashed",
color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey06, label='ddpg_vs_prey06', linewidth=1, linestyle = "dashed",
color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey07, label='ddpg_vs_prey07', linewidth=1, linestyle = "dashed",
color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey08, label='ddpg_vs_prey08', linewidth=1, linestyle = "dashed",
color='b', marker='v', markerfacecolor='red', markersize=2)
plt.plot(zz, ddpg_vs_prey09, label='ddpg_vs_prey09', linewidth=1, linestyle = "dashed",
color='b', marker='v', markerfacecolor='red', markersize=2)
# plt.tick_params(labelsize=23)
# font2 = {'family': 'Times New Roman',
# 'weight': 'normal',
# 'size': 30,
# }
# plt.title('ddpg',font2)
# plt.xlabel('iteration',font2)
# plt.ylabel('avg_success_rate',font2)
# plt.legend()
# plt.show()
#maddpg
# plt.plot(zz, maddpg_vs_prey00, label='maddpg_vs_prey00', linewidth=1, linestyle = "dashed",#prey-s
# color='g', marker='.', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey01, label='maddpg_vs_prey01', linewidth=1, linestyle = "dashed",
color='g', marker='.', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey02, label='maddpg_vs_prey02', linewidth=1, linestyle = "dashed",
color='g', marker='.', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey03, label='maddpg_vs_prey03', linewidth=1, linestyle = "dashed",
color='g', marker='.', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey04, label='maddpg_vs_prey04', linewidth=1, linestyle = "dashed",
color='g', marker='.', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey05, label='maddpg_vs_prey05', linewidth=1, linestyle = "dashed",#prey-23
color='g', marker='.', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey06, label='maddpg_vs_prey06', linewidth=1, linestyle = "dashed",
color='g', marker='.', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey07, label='maddpg_vs_prey07', linewidth=1, linestyle = "dashed",
color='g', marker='.', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey08, label='maddpg_vs_prey08', linewidth=1, linestyle = "dashed",
color='g', marker='.', markerfacecolor='red', markersize=2)
plt.plot(zz, maddpg_vs_prey09, label='maddpg_vs_prey09', linewidth=1, linestyle = "dashed",
color='g', marker='.', markerfacecolor='red', markersize=2)
# plt.tick_params(labelsize=23)
# font2 = {'family': 'Times New Roman',
# 'weight': 'normal',
# 'size': 30,
# }
# plt.title('maddpg',font2)
# plt.xlabel('iteration',font2)
# plt.ylabel('avg_success_rate',font2)
# plt.legend()
# plt.show()
plt.tick_params(labelsize=23)
font2 = {'family': 'Times New Roman',
         'weight': 'normal',
         'size': 30,
         }
plt.title('Different Seeds', font2)
plt.xlabel('Episodes', font2)
plt.ylabel('avg_success_rate', font2)
plt.legend(labels=[r"pgddpg($\beta=0.8$) vs preys", r"ddpg($\alpha=1$) vs preys", r"maddpg($\alpha=5$) vs preys"])
plt.show()
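# Optional (assumption): to also keep an image of the figure, a call such as
# plt.savefig('3v1_seed_comparison.png', dpi=300, bbox_inches='tight') could be added
# just before plt.show(); the filename here is only illustrative.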
'3v1_/learning_curves/model-prey-02/seed_ddpg/20200912103408/model-prey-02_sucess_record.pkl'\n , 'rb') as fo:\n ddpg_dict_data2 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data2))\nwith open(\n '3v1_/learning_curves/model-prey-03/seed_ddpg/20200912103416/model-prey-03_sucess_record.pkl'\n , 'rb') as fo:\n ddpg_dict_data3 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data3))\nwith open(\n '3v1_/learning_curves/model-prey-04/seed_ddpg/20200912103421/model-prey-04_sucess_record.pkl'\n , 'rb') as fo:\n ddpg_dict_data4 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data4))\nwith open(\n '3v1_/learning_curves/model-prey-23/seed_ddpg/20200912103327/model-prey-23_sucess_record.pkl'\n , 'rb') as fo:\n ddpg_dict_data5 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data5))\nwith open(\n '3v1_/learning_curves/model-prey-06/seed_ddpg/20200912103427/model-prey-06_sucess_record.pkl'\n , 'rb') as fo:\n ddpg_dict_data6 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data6))\nwith open(\n '3v1_/learning_curves/model-prey-07/seed_ddpg/20200912103433/model-prey-07_sucess_record.pkl'\n , 'rb') as fo:\n ddpg_dict_data7 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data7))\nwith open(\n '3v1_/learning_curves/model-prey-08/seed_ddpg/20200912103440/model-prey-08_sucess_record.pkl'\n , 'rb') as fo:\n ddpg_dict_data8 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data8))\nwith open(\n '3v1_/learning_curves/model-prey-09/seed_ddpg/20200912103446/model-prey-09_sucess_record.pkl'\n , 'rb') as fo:\n ddpg_dict_data9 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data9))\nwith open(\n '3v1_/learning_curves/model-prey-s/seed_maddpg/20200910205027/model-prey-s_sucess_record.pkl'\n , 'rb') as fo:\n maddpg_dict_data0 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data0))\nwith open(\n '3v1_/learning_curves/model-prey-01/seed_maddpg/20200910205033/model-prey-01_sucess_record.pkl'\n , 'rb') as fo:\n maddpg_dict_data1 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data1))\nwith open(\n '3v1_/learning_curves/model-prey-02/seed_maddpg/20200910205040/model-prey-02_sucess_record.pkl'\n , 'rb') as fo:\n maddpg_dict_data2 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data2))\nwith open(\n '3v1_/learning_curves/model-prey-03/seed_maddpg/20200910205046/model-prey-03_sucess_record.pkl'\n , 'rb') as fo:\n maddpg_dict_data3 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data3))\nwith open(\n '3v1_/learning_curves/model-prey-04/seed_maddpg/20200910205052/model-prey-04_sucess_record.pkl'\n , 'rb') as fo:\n maddpg_dict_data4 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data4))\nwith open(\n '3v1_/learning_curves/model-prey-23/seed_maddpg/20200910205019/model-prey-23_sucess_record.pkl'\n , 'rb') as fo:\n maddpg_dict_data5 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data5))\nwith open(\n '3v1_/learning_curves/model-prey-06/seed_maddpg/20200910205104/model-prey-06_sucess_record.pkl'\n , 'rb') as fo:\n maddpg_dict_data6 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data6))\nwith open(\n '3v1_/learning_curves/model-prey-07/seed_maddpg/20200910205135/model-prey-07_sucess_record.pkl'\n , 'rb') as fo:\n maddpg_dict_data7 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data7))\nwith open(\n '3v1_/learning_curves/model-prey-08/seed_maddpg/20200910205147/model-prey-08_sucess_record.pkl'\n , 'rb') as fo:\n maddpg_dict_data8 = pickle.load(fo, 
encoding='bytes')\n print(len(maddpg_dict_data8))\nwith open(\n '3v1_/learning_curves/model-prey-09/seed_maddpg/20200910205155/model-prey-09_sucess_record.pkl'\n , 'rb') as fo:\n maddpg_dict_data9 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data9))\nsmooth_neighbor = 5\nstart = 0\nend = 400\nddpg_vs_prey00 = savitzky_golay(np.array(ddpg_dict_data0[start:end]),\n smooth_neighbor, 3)\nddpg_vs_prey01 = savitzky_golay(np.array(ddpg_dict_data1[start:end]),\n smooth_neighbor, 3)\nddpg_vs_prey02 = savitzky_golay(np.array(ddpg_dict_data2[start:end]),\n smooth_neighbor, 3)\nddpg_vs_prey03 = savitzky_golay(np.array(ddpg_dict_data3[start:end]),\n smooth_neighbor, 3)\nddpg_vs_prey04 = savitzky_golay(np.array(ddpg_dict_data4[start:end]),\n smooth_neighbor, 3)\nddpg_vs_prey05 = savitzky_golay(np.array(ddpg_dict_data5[start:end]),\n smooth_neighbor, 3)\nddpg_vs_prey06 = savitzky_golay(np.array(ddpg_dict_data6[start:end]),\n smooth_neighbor, 3)\nddpg_vs_prey07 = savitzky_golay(np.array(ddpg_dict_data7[start:end]),\n smooth_neighbor, 3)\nddpg_vs_prey08 = savitzky_golay(np.array(ddpg_dict_data8[start:end]),\n smooth_neighbor, 3)\nddpg_vs_prey09 = savitzky_golay(np.array(ddpg_dict_data9[start:end]),\n smooth_neighbor, 3)\nmaddpg_vs_prey00 = savitzky_golay(np.array(maddpg_dict_data0[start:end]),\n smooth_neighbor, 3)\nmaddpg_vs_prey01 = savitzky_golay(np.array(maddpg_dict_data1[start:end]),\n smooth_neighbor, 3)\nmaddpg_vs_prey02 = savitzky_golay(np.array(maddpg_dict_data2[start:end]),\n smooth_neighbor, 3)\nmaddpg_vs_prey03 = savitzky_golay(np.array(maddpg_dict_data3[start:end]),\n smooth_neighbor, 3)\nmaddpg_vs_prey04 = savitzky_golay(np.array(maddpg_dict_data4[start:end]),\n smooth_neighbor, 3)\nmaddpg_vs_prey05 = savitzky_golay(np.array(maddpg_dict_data5[start:end]),\n smooth_neighbor, 3)\nmaddpg_vs_prey06 = savitzky_golay(np.array(maddpg_dict_data6[start:end]),\n smooth_neighbor, 3)\nmaddpg_vs_prey07 = savitzky_golay(np.array(maddpg_dict_data7[start:end]),\n smooth_neighbor, 3)\nmaddpg_vs_prey08 = savitzky_golay(np.array(maddpg_dict_data8[start:end]),\n smooth_neighbor, 3)\nmaddpg_vs_prey09 = savitzky_golay(np.array(maddpg_dict_data9[start:end]),\n smooth_neighbor, 3)\npgddpg_vs_prey00 = savitzky_golay(np.array(pgddpg_dict_data0[start:end]),\n smooth_neighbor, 3)\npgddpg_vs_prey01 = savitzky_golay(np.array(pgddpg_dict_data1[start:end]),\n smooth_neighbor, 3)\npgddpg_vs_prey02 = savitzky_golay(np.array(pgddpg_dict_data2[start:end]),\n smooth_neighbor, 3)\npgddpg_vs_prey03 = savitzky_golay(np.array(pgddpg_dict_data3[start:end]),\n smooth_neighbor, 3)\npgddpg_vs_prey04 = savitzky_golay(np.array(pgddpg_dict_data4[start:end]),\n smooth_neighbor, 3)\npgddpg_vs_prey05 = savitzky_golay(np.array(pgddpg_dict_data5[start:end]),\n smooth_neighbor, 3)\npgddpg_vs_prey06 = savitzky_golay(np.array(pgddpg_dict_data6[start:end]),\n smooth_neighbor, 3)\npgddpg_vs_prey07 = savitzky_golay(np.array(pgddpg_dict_data7[start:end]),\n smooth_neighbor, 3)\npgddpg_vs_prey08 = savitzky_golay(np.array(pgddpg_dict_data8[start:end]),\n smooth_neighbor, 3)\npgddpg_vs_prey09 = savitzky_golay(np.array(pgddpg_dict_data9[start:end]),\n smooth_neighbor, 3)\nprint(end)\nzz = range(0, end - start)\nzz = np.multiply(100, zz)\nplt.figure()\nplt.plot(zz, pgddpg_vs_prey00, label='pgddpg_vs_prey00', linewidth=1,\n linestyle='dashed', color='r', marker='o', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, ddpg_vs_prey00, label='ddpg_vs_prey00', linewidth=1, linestyle\n ='dashed', color='b', marker='v', markerfacecolor='red', 
markersize=2)\nplt.plot(zz, maddpg_vs_prey00, label='maddpg_vs_prey00', linewidth=1,\n linestyle='dashed', color='g', marker='.', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, pgddpg_vs_prey01, label='pgddpg_vs_prey01', linewidth=1,\n linestyle='dashed', color='r', marker='o', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, pgddpg_vs_prey02, label='pgddpg_vs_prey02', linewidth=1,\n linestyle='dashed', color='r', marker='o', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, pgddpg_vs_prey03, label='pgddpg_vs_prey03', linewidth=1,\n linestyle='dashed', color='r', marker='o', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, pgddpg_vs_prey04, label='pgddpg_vs_prey04', linewidth=1,\n linestyle='dashed', color='r', marker='o', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, pgddpg_vs_prey05, label='pgddpg_vs_prey05', linewidth=1,\n linestyle='dashed', color='r', marker='o', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, pgddpg_vs_prey06, label='pgddpg_vs_prey06', linewidth=1,\n linestyle='dashed', color='r', marker='o', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, pgddpg_vs_prey07, label='pgddpg_vs_prey07', linewidth=1,\n linestyle='dashed', color='r', marker='o', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, pgddpg_vs_prey08, label='pgddpg_vs_prey08', linewidth=1,\n linestyle='dashed', color='r', marker='o', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, pgddpg_vs_prey09, label='pgddpg_vs_prey09', linewidth=1,\n linestyle='dashed', color='r', marker='o', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, ddpg_vs_prey01, label='ddpg_vs_prey01', linewidth=1, linestyle\n ='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey02, label='ddpg_vs_prey02', linewidth=1, linestyle\n ='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey03, label='ddpg_vs_prey03', linewidth=1, linestyle\n ='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey04, label='ddpg_vs_prey04', linewidth=1, linestyle\n ='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey05, label='ddpg_vs_prey05', linewidth=1, linestyle\n ='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey06, label='ddpg_vs_prey06', linewidth=1, linestyle\n ='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey07, label='ddpg_vs_prey07', linewidth=1, linestyle\n ='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey08, label='ddpg_vs_prey08', linewidth=1, linestyle\n ='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey09, label='ddpg_vs_prey09', linewidth=1, linestyle\n ='dashed', color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, maddpg_vs_prey01, label='maddpg_vs_prey01', linewidth=1,\n linestyle='dashed', color='g', marker='.', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, maddpg_vs_prey02, label='maddpg_vs_prey02', linewidth=1,\n linestyle='dashed', color='g', marker='.', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, maddpg_vs_prey03, label='maddpg_vs_prey03', linewidth=1,\n linestyle='dashed', color='g', marker='.', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, maddpg_vs_prey04, label='maddpg_vs_prey04', linewidth=1,\n linestyle='dashed', color='g', marker='.', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, 
maddpg_vs_prey05, label='maddpg_vs_prey05', linewidth=1,\n linestyle='dashed', color='g', marker='.', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, maddpg_vs_prey06, label='maddpg_vs_prey06', linewidth=1,\n linestyle='dashed', color='g', marker='.', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, maddpg_vs_prey07, label='maddpg_vs_prey07', linewidth=1,\n linestyle='dashed', color='g', marker='.', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, maddpg_vs_prey08, label='maddpg_vs_prey08', linewidth=1,\n linestyle='dashed', color='g', marker='.', markerfacecolor='red',\n markersize=2)\nplt.plot(zz, maddpg_vs_prey09, label='maddpg_vs_prey09', linewidth=1,\n linestyle='dashed', color='g', marker='.', markerfacecolor='red',\n markersize=2)\nplt.tick_params(labelsize=23)\nfont2 = {'family': 'Times New Roman', 'weight': 'normal', 'size': 30}\nplt.title('Different Seeds', font2)\nplt.xlabel('Episodes', font2)\nplt.ylabel('avg_success_rate', font2)\nplt.legend(labels=['pgddpg($\\\\beta=0.8$) vs preys',\n 'ddpg($\\\\alpha=1$) vs preys', 'maddpg($\\\\alpha=5$) vs preys'])\nplt.show()\n",
"step-5": "import pickle\nfrom numpy import *\nimport math\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom matplotlib import animation\nfrom math import factorial\n\ndef savitzky_golay(y, window_size, order, deriv=0, rate=1):\n order_range = range(order+1)\n half_window = (window_size -1) // 2\n b = np.mat([[k**i for i in order_range] for k in range(-half_window, half_window+1)])\n m = np.linalg.pinv(b).A[deriv] * rate**deriv * factorial(deriv)\n firstvals = y[0] - np.abs( y[1:half_window+1][::-1] - y[0] )\n lastvals = y[-1] + np.abs(y[-half_window-1:-1][::-1] - y[-1])\n y = np.concatenate((firstvals, y, lastvals))\n return np.convolve( m[::-1], y, mode='valid')\n#pgddpg\nwith open(\"3v1_/learning_curves/model-prey-s/seed_pgddpg_0.8/pre_trained_prey_20200910204032/model-prey-s_sucess_record.pkl\", 'rb') as fo: \n pgddpg_dict_data0 = pickle.load(fo, encoding='bytes')\n print(len(pgddpg_dict_data0))\nwith open(\"3v1_/learning_curves/model-prey-01/seed_pgddpg_0.8/pre_trained_prey_20200910200405/model-prey-01_sucess_record.pkl\", 'rb') as fo: \n pgddpg_dict_data1 = pickle.load(fo, encoding='bytes')\n print(len(pgddpg_dict_data1))\nwith open(\"3v1_/learning_curves/model-prey-02/seed_pgddpg_0.8/pre_trained_prey_20200910200419/model-prey-02_sucess_record.pkl\", 'rb') as fo: \n pgddpg_dict_data2 = pickle.load(fo, encoding='bytes')\n print(len(pgddpg_dict_data2))\nwith open(\"3v1_/learning_curves/model-prey-03/seed_pgddpg_0.8/pre_trained_prey_20200910200427/model-prey-03_sucess_record.pkl\", 'rb') as fo: \n pgddpg_dict_data3 = pickle.load(fo, encoding='bytes')\n print(len(pgddpg_dict_data3 ))\nwith open(\"3v1_/learning_curves/model-prey-04/seed_pgddpg_0.8/pre_trained_prey_20200910200435/model-prey-04_sucess_record.pkl\", 'rb') as fo: \n pgddpg_dict_data4 = pickle.load(fo, encoding='bytes')\n print(len(pgddpg_dict_data4))\nwith open(\"3v1_/learning_curves/model-prey-23/seed_pgddpg_0.8/pre_trained_prey_20200910200115/model-prey-23_sucess_record.pkl\", 'rb') as fo: \n pgddpg_dict_data5 = pickle.load(fo, encoding='bytes')\n print(len(pgddpg_dict_data5))\nwith open(\"3v1_/learning_curves/model-prey-06/seed_pgddpg_0.8/pre_trained_prey_20200910200446/model-prey-06_sucess_record.pkl\", 'rb') as fo: \n pgddpg_dict_data6 = pickle.load(fo, encoding='bytes')\n print(len(pgddpg_dict_data6))\nwith open(\"3v1_/learning_curves/model-prey-07/seed_pgddpg_0.8/pre_trained_prey_20200910200455/model-prey-07_sucess_record.pkl\", 'rb') as fo: \n pgddpg_dict_data7 = pickle.load(fo, encoding='bytes')\n print(len(pgddpg_dict_data7))\nwith open(\"3v1_/learning_curves/model-prey-08/seed_pgddpg_0.8/pre_trained_prey_20200910200504/model-prey-08_sucess_record.pkl\", 'rb') as fo: \n pgddpg_dict_data8 = pickle.load(fo, encoding='bytes')\n print(len(pgddpg_dict_data8))\nwith open(\"3v1_/learning_curves/model-prey-09/seed_pgddpg_0.8/pre_trained_prey_20200910200512/model-prey-09_sucess_record.pkl\", 'rb') as fo: \n pgddpg_dict_data9 = pickle.load(fo, encoding='bytes')\n print(len(pgddpg_dict_data9))\n\n#ddpg\nwith open(\"3v1_/learning_curves/model-prey-s/seed_ddpg/20200912103349/model-prey-s_sucess_record.pkl\", 'rb') as fo: \n ddpg_dict_data0 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data0))\nwith open(\"3v1_/learning_curves/model-prey-01/seed_ddpg/20200912103401/model-prey-01_sucess_record.pkl\", 'rb') as fo: \n ddpg_dict_data1 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data1))\nwith open(\"3v1_/learning_curves/model-prey-02/seed_ddpg/20200912103408/model-prey-02_sucess_record.pkl\", 'rb') 
as fo: \n ddpg_dict_data2 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data2))\nwith open(\"3v1_/learning_curves/model-prey-03/seed_ddpg/20200912103416/model-prey-03_sucess_record.pkl\", 'rb') as fo: \n ddpg_dict_data3 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data3 ))\nwith open(\"3v1_/learning_curves/model-prey-04/seed_ddpg/20200912103421/model-prey-04_sucess_record.pkl\", 'rb') as fo: \n ddpg_dict_data4 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data4))\nwith open(\"3v1_/learning_curves/model-prey-23/seed_ddpg/20200912103327/model-prey-23_sucess_record.pkl\", 'rb') as fo: \n ddpg_dict_data5 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data5))\nwith open(\"3v1_/learning_curves/model-prey-06/seed_ddpg/20200912103427/model-prey-06_sucess_record.pkl\", 'rb') as fo: \n ddpg_dict_data6 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data6))\nwith open(\"3v1_/learning_curves/model-prey-07/seed_ddpg/20200912103433/model-prey-07_sucess_record.pkl\", 'rb') as fo: \n ddpg_dict_data7 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data7))\nwith open(\"3v1_/learning_curves/model-prey-08/seed_ddpg/20200912103440/model-prey-08_sucess_record.pkl\", 'rb') as fo: \n ddpg_dict_data8 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data8))\nwith open(\"3v1_/learning_curves/model-prey-09/seed_ddpg/20200912103446/model-prey-09_sucess_record.pkl\", 'rb') as fo: \n ddpg_dict_data9 = pickle.load(fo, encoding='bytes')\n print(len(ddpg_dict_data9))\n\n#maddpg\nwith open(\"3v1_/learning_curves/model-prey-s/seed_maddpg/20200910205027/model-prey-s_sucess_record.pkl\", 'rb') as fo: \n maddpg_dict_data0 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data0))\nwith open(\"3v1_/learning_curves/model-prey-01/seed_maddpg/20200910205033/model-prey-01_sucess_record.pkl\", 'rb') as fo: \n maddpg_dict_data1 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data1))\nwith open(\"3v1_/learning_curves/model-prey-02/seed_maddpg/20200910205040/model-prey-02_sucess_record.pkl\", 'rb') as fo: \n maddpg_dict_data2 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data2))\nwith open(\"3v1_/learning_curves/model-prey-03/seed_maddpg/20200910205046/model-prey-03_sucess_record.pkl\", 'rb') as fo: \n maddpg_dict_data3 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data3 ))\nwith open(\"3v1_/learning_curves/model-prey-04/seed_maddpg/20200910205052/model-prey-04_sucess_record.pkl\", 'rb') as fo: \n maddpg_dict_data4 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data4))\nwith open(\"3v1_/learning_curves/model-prey-23/seed_maddpg/20200910205019/model-prey-23_sucess_record.pkl\", 'rb') as fo: \n maddpg_dict_data5 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data5))\nwith open(\"3v1_/learning_curves/model-prey-06/seed_maddpg/20200910205104/model-prey-06_sucess_record.pkl\", 'rb') as fo: \n maddpg_dict_data6 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data6))\nwith open(\"3v1_/learning_curves/model-prey-07/seed_maddpg/20200910205135/model-prey-07_sucess_record.pkl\", 'rb') as fo: \n maddpg_dict_data7 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data7))\nwith open(\"3v1_/learning_curves/model-prey-08/seed_maddpg/20200910205147/model-prey-08_sucess_record.pkl\", 'rb') as fo: \n maddpg_dict_data8 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data8))\nwith 
open(\"3v1_/learning_curves/model-prey-09/seed_maddpg/20200910205155/model-prey-09_sucess_record.pkl\", 'rb') as fo: \n maddpg_dict_data9 = pickle.load(fo, encoding='bytes')\n print(len(maddpg_dict_data9))\n\n\n\nsmooth_neighbor=5\nstart=0\n# end=min(len(pgddpg_dict_data0),len(pgddpg_dict_data1),len(pgddpg_dict_data2),len(pgddpg_dict_data3),len(pgddpg_dict_data4),len(pgddpg_dict_data5),len(pgddpg_dict_data6),len(pgddpg_dict_data7),len(pgddpg_dict_data8),len(pgddpg_dict_data9),)\nend=400\n\nddpg_vs_prey00 = savitzky_golay(np.array(ddpg_dict_data0[start:end]), smooth_neighbor, 3) \nddpg_vs_prey01 = savitzky_golay(np.array(ddpg_dict_data1[start:end]), smooth_neighbor, 3) \nddpg_vs_prey02 = savitzky_golay(np.array(ddpg_dict_data2[start:end]), smooth_neighbor, 3) \nddpg_vs_prey03 = savitzky_golay(np.array(ddpg_dict_data3[start:end]), smooth_neighbor, 3) \nddpg_vs_prey04 = savitzky_golay(np.array(ddpg_dict_data4[start:end]), smooth_neighbor, 3) \nddpg_vs_prey05 = savitzky_golay(np.array(ddpg_dict_data5[start:end]), smooth_neighbor, 3) \nddpg_vs_prey06 = savitzky_golay(np.array(ddpg_dict_data6[start:end]), smooth_neighbor, 3) \nddpg_vs_prey07 = savitzky_golay(np.array(ddpg_dict_data7[start:end]), smooth_neighbor, 3) \nddpg_vs_prey08 = savitzky_golay(np.array(ddpg_dict_data8[start:end]), smooth_neighbor, 3) \nddpg_vs_prey09 = savitzky_golay(np.array(ddpg_dict_data9[start:end]), smooth_neighbor, 3) \n\nmaddpg_vs_prey00 = savitzky_golay(np.array(maddpg_dict_data0[start:end]), smooth_neighbor, 3) \nmaddpg_vs_prey01 = savitzky_golay(np.array(maddpg_dict_data1[start:end]), smooth_neighbor, 3) \nmaddpg_vs_prey02 = savitzky_golay(np.array(maddpg_dict_data2[start:end]), smooth_neighbor, 3) \nmaddpg_vs_prey03 = savitzky_golay(np.array(maddpg_dict_data3[start:end]), smooth_neighbor, 3) \nmaddpg_vs_prey04 = savitzky_golay(np.array(maddpg_dict_data4[start:end]), smooth_neighbor, 3) \nmaddpg_vs_prey05 = savitzky_golay(np.array(maddpg_dict_data5[start:end]), smooth_neighbor, 3) \nmaddpg_vs_prey06 = savitzky_golay(np.array(maddpg_dict_data6[start:end]), smooth_neighbor, 3) \nmaddpg_vs_prey07 = savitzky_golay(np.array(maddpg_dict_data7[start:end]), smooth_neighbor, 3) \nmaddpg_vs_prey08 = savitzky_golay(np.array(maddpg_dict_data8[start:end]), smooth_neighbor, 3) \nmaddpg_vs_prey09 = savitzky_golay(np.array(maddpg_dict_data9[start:end]), smooth_neighbor, 3) \n\npgddpg_vs_prey00 = savitzky_golay(np.array(pgddpg_dict_data0[start:end]), smooth_neighbor, 3) \npgddpg_vs_prey01 = savitzky_golay(np.array(pgddpg_dict_data1[start:end]), smooth_neighbor, 3) \npgddpg_vs_prey02 = savitzky_golay(np.array(pgddpg_dict_data2[start:end]), smooth_neighbor, 3) \npgddpg_vs_prey03 = savitzky_golay(np.array(pgddpg_dict_data3[start:end]), smooth_neighbor, 3) \npgddpg_vs_prey04 = savitzky_golay(np.array(pgddpg_dict_data4[start:end]), smooth_neighbor, 3) \npgddpg_vs_prey05 = savitzky_golay(np.array(pgddpg_dict_data5[start:end]), smooth_neighbor, 3) \npgddpg_vs_prey06 = savitzky_golay(np.array(pgddpg_dict_data6[start:end]), smooth_neighbor, 3) \npgddpg_vs_prey07 = savitzky_golay(np.array(pgddpg_dict_data7[start:end]), smooth_neighbor, 3) \npgddpg_vs_prey08 = savitzky_golay(np.array(pgddpg_dict_data8[start:end]), smooth_neighbor, 3) \npgddpg_vs_prey09 = savitzky_golay(np.array(pgddpg_dict_data9[start:end]), smooth_neighbor, 3) \n\nprint(end)\n\nzz = range(0, end-start)\nzz=np.multiply(100, zz)\n#ax1 = plt.subplot(2,1,1)\n\nplt.figure()\n\n#pgmaddpg\nplt.plot(zz, pgddpg_vs_prey00, label='pgddpg_vs_prey00', linewidth=1, linestyle = 
\"dashed\",#prey-s\n color='r', marker='o', markerfacecolor='red', markersize=2)\n#ddpg\nplt.plot(zz, ddpg_vs_prey00, label='ddpg_vs_prey00', linewidth=1, linestyle = \"dashed\",\n color='b', marker='v', markerfacecolor='red', markersize=2)\n#maddpg\nplt.plot(zz, maddpg_vs_prey00, label='maddpg_vs_prey00', linewidth=1, linestyle = \"dashed\",#prey-s\n color='g', marker='.', markerfacecolor='red', markersize=2)\nplt.plot(zz, pgddpg_vs_prey01, label='pgddpg_vs_prey01', linewidth=1, linestyle = \"dashed\",\n color='r', marker='o', markerfacecolor='red', markersize=2)\nplt.plot(zz, pgddpg_vs_prey02, label='pgddpg_vs_prey02', linewidth=1, linestyle = \"dashed\",\n color='r', marker='o', markerfacecolor='red', markersize=2)\nplt.plot(zz, pgddpg_vs_prey03, label='pgddpg_vs_prey03', linewidth=1, linestyle = \"dashed\",\n color='r', marker='o', markerfacecolor='red', markersize=2)\nplt.plot(zz, pgddpg_vs_prey04, label='pgddpg_vs_prey04', linewidth=1, linestyle = \"dashed\",\n color='r', marker='o', markerfacecolor='red', markersize=2)\nplt.plot(zz, pgddpg_vs_prey05, label='pgddpg_vs_prey05', linewidth=1, linestyle = \"dashed\",#prey-23\n color='r', marker='o', markerfacecolor='red', markersize=2)\nplt.plot(zz, pgddpg_vs_prey06, label='pgddpg_vs_prey06', linewidth=1, linestyle = \"dashed\",\n color='r', marker='o', markerfacecolor='red', markersize=2)\nplt.plot(zz, pgddpg_vs_prey07, label='pgddpg_vs_prey07', linewidth=1, linestyle = \"dashed\",\n color='r', marker='o', markerfacecolor='red', markersize=2)\nplt.plot(zz, pgddpg_vs_prey08, label='pgddpg_vs_prey08', linewidth=1, linestyle = \"dashed\",\n color='r', marker='o', markerfacecolor='red', markersize=2)\nplt.plot(zz, pgddpg_vs_prey09, label='pgddpg_vs_prey09', linewidth=1, linestyle = \"dashed\",\n color='r', marker='o', markerfacecolor='red', markersize=2)\n\n\n# plt.tick_params(labelsize=23)\n\n\n# font2 = {'family': 'Times New Roman',\n# 'weight': 'normal',\n# 'size': 30,\n# }\n\n# plt.title('pgddpg',font2)\n# plt.xlabel('iteration',font2)\n# plt.ylabel('avg_success_rate',font2)\n# plt.legend()\n# plt.show()\n#ddpg\n# plt.plot(zz, ddpg_vs_prey00, label='ddpg_vs_prey00', linewidth=1, linestyle = \"dashed\",\n# color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey01, label='ddpg_vs_prey01', linewidth=1, linestyle = \"dashed\",\n color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey02, label='ddpg_vs_prey02', linewidth=1, linestyle = \"dashed\",\n color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey03, label='ddpg_vs_prey03', linewidth=1, linestyle = \"dashed\",\n color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey04, label='ddpg_vs_prey04', linewidth=1, linestyle = \"dashed\",\n color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey05, label='ddpg_vs_prey05', linewidth=1, linestyle = \"dashed\",\n color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey06, label='ddpg_vs_prey06', linewidth=1, linestyle = \"dashed\",\n color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey07, label='ddpg_vs_prey07', linewidth=1, linestyle = \"dashed\",\n color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey08, label='ddpg_vs_prey08', linewidth=1, linestyle = \"dashed\",\n color='b', marker='v', markerfacecolor='red', markersize=2)\nplt.plot(zz, ddpg_vs_prey09, label='ddpg_vs_prey09', linewidth=1, 
linestyle = \"dashed\",\n color='b', marker='v', markerfacecolor='red', markersize=2)\n\n\n# plt.tick_params(labelsize=23)\n\n\n# font2 = {'family': 'Times New Roman',\n# 'weight': 'normal',\n# 'size': 30,\n# }\n\n# plt.title('ddpg',font2)\n# plt.xlabel('iteration',font2)\n# plt.ylabel('avg_success_rate',font2)\n# plt.legend()\n# plt.show()\n#maddpg\n# plt.plot(zz, maddpg_vs_prey00, label='maddpg_vs_prey00', linewidth=1, linestyle = \"dashed\",#prey-s\n# color='g', marker='.', markerfacecolor='red', markersize=2)\nplt.plot(zz, maddpg_vs_prey01, label='maddpg_vs_prey01', linewidth=1, linestyle = \"dashed\",\n color='g', marker='.', markerfacecolor='red', markersize=2)\nplt.plot(zz, maddpg_vs_prey02, label='maddpg_vs_prey02', linewidth=1, linestyle = \"dashed\",\n color='g', marker='.', markerfacecolor='red', markersize=2)\nplt.plot(zz, maddpg_vs_prey03, label='maddpg_vs_prey03', linewidth=1, linestyle = \"dashed\",\n color='g', marker='.', markerfacecolor='red', markersize=2)\nplt.plot(zz, maddpg_vs_prey04, label='maddpg_vs_prey04', linewidth=1, linestyle = \"dashed\",\n color='g', marker='.', markerfacecolor='red', markersize=2)\nplt.plot(zz, maddpg_vs_prey05, label='maddpg_vs_prey05', linewidth=1, linestyle = \"dashed\",#prey-23\n color='g', marker='.', markerfacecolor='red', markersize=2)\nplt.plot(zz, maddpg_vs_prey06, label='maddpg_vs_prey06', linewidth=1, linestyle = \"dashed\",\n color='g', marker='.', markerfacecolor='red', markersize=2)\nplt.plot(zz, maddpg_vs_prey07, label='maddpg_vs_prey07', linewidth=1, linestyle = \"dashed\",\n color='g', marker='.', markerfacecolor='red', markersize=2)\nplt.plot(zz, maddpg_vs_prey08, label='maddpg_vs_prey08', linewidth=1, linestyle = \"dashed\",\n color='g', marker='.', markerfacecolor='red', markersize=2)\nplt.plot(zz, maddpg_vs_prey09, label='maddpg_vs_prey09', linewidth=1, linestyle = \"dashed\",\n color='g', marker='.', markerfacecolor='red', markersize=2)\n\n\n# plt.tick_params(labelsize=23)\n\n\n# font2 = {'family': 'Times New Roman',\n# 'weight': 'normal',\n# 'size': 30,\n# }\n\n# plt.title('maddpg',font2)\n# plt.xlabel('iteration',font2)\n# plt.ylabel('avg_success_rate',font2)\n# plt.legend()\n# plt.show()\n\nplt.tick_params(labelsize=23)\nfont2 = {'family': 'Times New Roman',\n 'weight': 'normal',\n 'size': 30,\n }\n\nplt.title('Different Seeds',font2)\nplt.xlabel('Episodes',font2)\nplt.ylabel('avg_success_rate',font2)\nplt.legend(labels =[r\"pgddpg($\\beta=0.8$) vs preys\",r\"ddpg($\\alpha=1$) vs preys\",r\"maddpg($\\alpha=5$) vs preys\"])\nplt.show()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
st.merge()
st.detrend(type='demean')
st.remove_response()
st.filter('bandpass', freqmin=F1, freqmax=F2, corners=4)
st.trim(t1, t2)
<|reserved_special_token_0|>
plt.suptitle(LABEL)
<|reserved_special_token_0|>
ax.plot(st[0].times(reftime=orig_time), st[0].data * 1000, linewidth=0.2,
color='darkred')
<|reserved_special_token_0|>
for phase in PHASES:
phase = [phase]
tt = model.get_travel_times(source_depth_in_km=EVT_Z,
distance_in_degree=dist, phase_list=phase)
ax.vlines(tt[0].time, ymin, ymax, color='blue', linewidth=1.2, zorder=3,
linestyle='--', alpha=0.5)
ax.text(tt[0].time * 1.02, ymax, phase[0], fontsize=12,
horizontalalignment='left', verticalalignment='top')
ax.set_xlabel('Time after earthquake (s)')
ax.set_title("""{:}.{:}.{:}.{:}
Bandpass filter: {:}-{:} Hz""".format(st[0]
.stats.network, st[0].stats.station, st[0].stats.location, st[0].stats.
channel, F1, F2))
ax.set_ylabel('Ground velocity (mm/s)')
<|reserved_special_token_0|>
ax3.set_title('Epicentral distance: {:3.1f}$^\\circ$'.format(dist))
plt.tight_layout(rect=[0, 0.03, 1, 0.95])
plt.savefig('traces.png')
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
NETWORK = 'AM'
STATION = 'RAEBE'
CHANNEL = 'EHZ'
EQ_TIME = '2020-01-07T08:24:26'
T_START = 0
T_END = 1250
PHASES = ['P', 'S']
EVT_LAT = 17.916
EVT_LON = -66.813
EVT_Z = 10
STA_LAT = 51.33
STA_LON = -0.49
F1 = 0.3
F2 = 0.7
LABEL = 'M 6.4 Puerto Rico'
MODEL = 'iasp91'
client = Client('http://fdsnws.raspberryshakedata.com')
orig_time = UTCDateTime(EQ_TIME)
t1 = orig_time - T_START
t2 = orig_time + T_END
st = client.get_waveforms(NETWORK, STATION, '00', CHANNEL, starttime=t1,
endtime=t2, attach_response=True)
st.merge()
st.detrend(type='demean')
st.remove_response()
st.filter('bandpass', freqmin=F1, freqmax=F2, corners=4)
st.trim(t1, t2)
fig = plt.figure(figsize=(12, 8))
plt.suptitle(LABEL)
ax = plt.subplot(121)
dist = locations2degrees(EVT_LAT, EVT_LON, STA_LAT, STA_LON)
model = TauPyModel(model=MODEL)
ax.plot(st[0].times(reftime=orig_time), st[0].data * 1000, linewidth=0.2,
color='darkred')
ymin, ymax = ax.get_ylim()
for phase in PHASES:
phase = [phase]
tt = model.get_travel_times(source_depth_in_km=EVT_Z,
distance_in_degree=dist, phase_list=phase)
ax.vlines(tt[0].time, ymin, ymax, color='blue', linewidth=1.2, zorder=3,
linestyle='--', alpha=0.5)
ax.text(tt[0].time * 1.02, ymax, phase[0], fontsize=12,
horizontalalignment='left', verticalalignment='top')
ax.set_xlabel('Time after earthquake (s)')
ax.set_title("""{:}.{:}.{:}.{:}
Bandpass filter: {:}-{:} Hz""".format(st[0]
.stats.network, st[0].stats.station, st[0].stats.location, st[0].stats.
channel, F1, F2))
ax.set_ylabel('Ground velocity (mm/s)')
ax2 = plt.subplot(122, projection='polar')
arrivals = model.get_ray_paths(source_depth_in_km=EVT_Z, distance_in_degree
=dist, phase_list=PHASES)
ax3 = arrivals.plot_rays(phase, legend=False, ax=ax2, show=False,
label_arrivals=True)
ax3.set_title('Epicentral distance: {:3.1f}$^\\circ$'.format(dist))
plt.tight_layout(rect=[0, 0.03, 1, 0.95])
plt.savefig('traces.png')
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from obspy.clients.fdsn import Client
from obspy import UTCDateTime
from obspy.taup import TauPyModel
from obspy.geodetics.base import locations2degrees
import matplotlib.pyplot as plt
NETWORK = 'AM'
STATION = 'RAEBE'
CHANNEL = 'EHZ'
EQ_TIME = '2020-01-07T08:24:26'
T_START = 0
T_END = 1250
PHASES = ['P', 'S']
EVT_LAT = 17.916
EVT_LON = -66.813
EVT_Z = 10
STA_LAT = 51.33
STA_LON = -0.49
F1 = 0.3
F2 = 0.7
LABEL = 'M 6.4 Puerto Rico'
MODEL = 'iasp91'
client = Client('http://fdsnws.raspberryshakedata.com')
orig_time = UTCDateTime(EQ_TIME)
t1 = orig_time - T_START
t2 = orig_time + T_END
st = client.get_waveforms(NETWORK, STATION, '00', CHANNEL, starttime=t1,
endtime=t2, attach_response=True)
st.merge()
st.detrend(type='demean')
st.remove_response()
st.filter('bandpass', freqmin=F1, freqmax=F2, corners=4)
st.trim(t1, t2)
fig = plt.figure(figsize=(12, 8))
plt.suptitle(LABEL)
ax = plt.subplot(121)
dist = locations2degrees(EVT_LAT, EVT_LON, STA_LAT, STA_LON)
model = TauPyModel(model=MODEL)
ax.plot(st[0].times(reftime=orig_time), st[0].data * 1000, linewidth=0.2,
color='darkred')
ymin, ymax = ax.get_ylim()
for phase in PHASES:
phase = [phase]
tt = model.get_travel_times(source_depth_in_km=EVT_Z,
distance_in_degree=dist, phase_list=phase)
ax.vlines(tt[0].time, ymin, ymax, color='blue', linewidth=1.2, zorder=3,
linestyle='--', alpha=0.5)
ax.text(tt[0].time * 1.02, ymax, phase[0], fontsize=12,
horizontalalignment='left', verticalalignment='top')
ax.set_xlabel('Time after earthquake (s)')
ax.set_title("""{:}.{:}.{:}.{:}
Bandpass filter: {:}-{:} Hz""".format(st[0]
.stats.network, st[0].stats.station, st[0].stats.location, st[0].stats.
channel, F1, F2))
ax.set_ylabel('Ground velocity (mm/s)')
ax2 = plt.subplot(122, projection='polar')
arrivals = model.get_ray_paths(source_depth_in_km=EVT_Z, distance_in_degree
=dist, phase_list=PHASES)
ax3 = arrivals.plot_rays(phase, legend=False, ax=ax2, show=False,
label_arrivals=True)
ax3.set_title('Epicentral distance: {:3.1f}$^\\circ$'.format(dist))
plt.tight_layout(rect=[0, 0.03, 1, 0.95])
plt.savefig('traces.png')
plt.show()
<|reserved_special_token_1|>
#!/usr/bin/env python
"""
Script to download and plot RaspberryShake station data
Also computes and plots theoretical phase arrival times and raypaths.
See https://docs.obspy.org/packages/obspy.taup.html for more info on
Earth models and phase-naming nomenclature.
Stephen Hicks
Imperial College London
Feb 2020
"""
from obspy.clients.fdsn import Client
from obspy import UTCDateTime
from obspy.taup import TauPyModel
from obspy.geodetics.base import locations2degrees
import matplotlib.pyplot as plt
# Start of parameters to define
NETWORK = "AM" # AM = RaspberryShake network
STATION = "RAEBE" # Station code of station to get data for
CHANNEL = "EHZ" # channel to grab data for (e.g. EHZ, SHZ, EHE, EHN)
EQ_TIME = "2020-01-07T08:24:26" # origin time of earthquake
T_START = 0 # Length in seconds of data to plot before origin time
T_END = 1250 # Length in seconds of data to plot after origin time
PHASES = ["P", "S"] # list of phases to compute theoretical times for
EVT_LAT = 17.916 # Latitude of event
EVT_LON = -66.813 # Longitude of event
EVT_Z = 10 # Depth of event
STA_LAT = 51.33 # Latitude of station
STA_LON = -0.49 # Longitude of station
F1 = 0.3 # High-pass filter corner
F2 = 0.7 # Low-pass filter corner
LABEL = "M 6.4 Puerto Rico" # Title to plot on figure
MODEL = 'iasp91' # Velocity model to predict travel-times through
# End of parameters to define
# Define fdsn client to get data from
client = Client('http://fdsnws.raspberryshakedata.com')
# Define start and end time
orig_time = UTCDateTime(EQ_TIME)
t1 = orig_time - T_START
t2 = orig_time + T_END
# Download and filter data
st = client.get_waveforms(NETWORK, STATION, "00", CHANNEL,
starttime=t1, endtime=t2, attach_response=True)
st.merge()
st.detrend(type="demean")
st.remove_response()
st.filter("bandpass", freqmin=F1, freqmax=F2, corners=4)
st.trim(t1, t2)
# Set-up figure
fig = plt.figure(figsize=(12, 8))
plt.suptitle(LABEL)
ax = plt.subplot(121)
# Set-up taup travel-time model
dist = locations2degrees(EVT_LAT, EVT_LON, STA_LAT, STA_LON)
model = TauPyModel(model=MODEL)
# Now plot the waveform data
ax.plot(st[0].times(reftime=orig_time), st[0].data*1000, linewidth=0.2,
color="darkred")
ymin, ymax = ax.get_ylim()
# Now plot the theoretical arrival times
for phase in PHASES:
phase = [phase]
tt = model.get_travel_times(source_depth_in_km=EVT_Z,
distance_in_degree=dist,
phase_list=phase)
ax.vlines(tt[0].time, ymin, ymax, color="blue",
linewidth=1.2, zorder=3, linestyle="--", alpha=0.5)
ax.text(tt[0].time*1.02, ymax, phase[0], fontsize=12,
horizontalalignment="left", verticalalignment="top")
ax.set_xlabel("Time after earthquake (s)")
ax.set_title("{:}.{:}.{:}.{:}\nBandpass filter: {:}-{:} Hz".format(
st[0].stats.network, st[0].stats.station, st[0].stats.location,
st[0].stats.channel, F1, F2))
ax.set_ylabel("Ground velocity (mm/s)")
# Now plot the raypaths through the Earth
ax2 = plt.subplot(122, projection='polar')
arrivals = model.get_ray_paths(
source_depth_in_km=EVT_Z, distance_in_degree=dist,
phase_list=PHASES)
ax3 = arrivals.plot_rays(phase, legend=False, ax=ax2, show=False,
label_arrivals=True)
ax3.set_title("Epicentral distance: {:3.1f}$^\circ$".format(dist))
# Save and plot the figure
plt.tight_layout(rect=[0, 0.03, 1, 0.95])
plt.savefig("traces.png")
plt.show()
|
flexible
|
{
"blob_id": "8d8ea6ad7a3ed1a1e6e96ab75260ecf6e8211d32",
"index": 1305,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nst.merge()\nst.detrend(type='demean')\nst.remove_response()\nst.filter('bandpass', freqmin=F1, freqmax=F2, corners=4)\nst.trim(t1, t2)\n<mask token>\nplt.suptitle(LABEL)\n<mask token>\nax.plot(st[0].times(reftime=orig_time), st[0].data * 1000, linewidth=0.2,\n color='darkred')\n<mask token>\nfor phase in PHASES:\n phase = [phase]\n tt = model.get_travel_times(source_depth_in_km=EVT_Z,\n distance_in_degree=dist, phase_list=phase)\n ax.vlines(tt[0].time, ymin, ymax, color='blue', linewidth=1.2, zorder=3,\n linestyle='--', alpha=0.5)\n ax.text(tt[0].time * 1.02, ymax, phase[0], fontsize=12,\n horizontalalignment='left', verticalalignment='top')\nax.set_xlabel('Time after earthquake (s)')\nax.set_title(\"\"\"{:}.{:}.{:}.{:}\nBandpass filter: {:}-{:} Hz\"\"\".format(st[0]\n .stats.network, st[0].stats.station, st[0].stats.location, st[0].stats.\n channel, F1, F2))\nax.set_ylabel('Ground velocity (mm/s)')\n<mask token>\nax3.set_title('Epicentral distance: {:3.1f}$^\\\\circ$'.format(dist))\nplt.tight_layout(rect=[0, 0.03, 1, 0.95])\nplt.savefig('traces.png')\nplt.show()\n",
"step-3": "<mask token>\nNETWORK = 'AM'\nSTATION = 'RAEBE'\nCHANNEL = 'EHZ'\nEQ_TIME = '2020-01-07T08:24:26'\nT_START = 0\nT_END = 1250\nPHASES = ['P', 'S']\nEVT_LAT = 17.916\nEVT_LON = -66.813\nEVT_Z = 10\nSTA_LAT = 51.33\nSTA_LON = -0.49\nF1 = 0.3\nF2 = 0.7\nLABEL = 'M 6.4 Puerto Rico'\nMODEL = 'iasp91'\nclient = Client('http://fdsnws.raspberryshakedata.com')\norig_time = UTCDateTime(EQ_TIME)\nt1 = orig_time - T_START\nt2 = orig_time + T_END\nst = client.get_waveforms(NETWORK, STATION, '00', CHANNEL, starttime=t1,\n endtime=t2, attach_response=True)\nst.merge()\nst.detrend(type='demean')\nst.remove_response()\nst.filter('bandpass', freqmin=F1, freqmax=F2, corners=4)\nst.trim(t1, t2)\nfig = plt.figure(figsize=(12, 8))\nplt.suptitle(LABEL)\nax = plt.subplot(121)\ndist = locations2degrees(EVT_LAT, EVT_LON, STA_LAT, STA_LON)\nmodel = TauPyModel(model=MODEL)\nax.plot(st[0].times(reftime=orig_time), st[0].data * 1000, linewidth=0.2,\n color='darkred')\nymin, ymax = ax.get_ylim()\nfor phase in PHASES:\n phase = [phase]\n tt = model.get_travel_times(source_depth_in_km=EVT_Z,\n distance_in_degree=dist, phase_list=phase)\n ax.vlines(tt[0].time, ymin, ymax, color='blue', linewidth=1.2, zorder=3,\n linestyle='--', alpha=0.5)\n ax.text(tt[0].time * 1.02, ymax, phase[0], fontsize=12,\n horizontalalignment='left', verticalalignment='top')\nax.set_xlabel('Time after earthquake (s)')\nax.set_title(\"\"\"{:}.{:}.{:}.{:}\nBandpass filter: {:}-{:} Hz\"\"\".format(st[0]\n .stats.network, st[0].stats.station, st[0].stats.location, st[0].stats.\n channel, F1, F2))\nax.set_ylabel('Ground velocity (mm/s)')\nax2 = plt.subplot(122, projection='polar')\narrivals = model.get_ray_paths(source_depth_in_km=EVT_Z, distance_in_degree\n =dist, phase_list=PHASES)\nax3 = arrivals.plot_rays(phase, legend=False, ax=ax2, show=False,\n label_arrivals=True)\nax3.set_title('Epicentral distance: {:3.1f}$^\\\\circ$'.format(dist))\nplt.tight_layout(rect=[0, 0.03, 1, 0.95])\nplt.savefig('traces.png')\nplt.show()\n",
"step-4": "<mask token>\nfrom obspy.clients.fdsn import Client\nfrom obspy import UTCDateTime\nfrom obspy.taup import TauPyModel\nfrom obspy.geodetics.base import locations2degrees\nimport matplotlib.pyplot as plt\nNETWORK = 'AM'\nSTATION = 'RAEBE'\nCHANNEL = 'EHZ'\nEQ_TIME = '2020-01-07T08:24:26'\nT_START = 0\nT_END = 1250\nPHASES = ['P', 'S']\nEVT_LAT = 17.916\nEVT_LON = -66.813\nEVT_Z = 10\nSTA_LAT = 51.33\nSTA_LON = -0.49\nF1 = 0.3\nF2 = 0.7\nLABEL = 'M 6.4 Puerto Rico'\nMODEL = 'iasp91'\nclient = Client('http://fdsnws.raspberryshakedata.com')\norig_time = UTCDateTime(EQ_TIME)\nt1 = orig_time - T_START\nt2 = orig_time + T_END\nst = client.get_waveforms(NETWORK, STATION, '00', CHANNEL, starttime=t1,\n endtime=t2, attach_response=True)\nst.merge()\nst.detrend(type='demean')\nst.remove_response()\nst.filter('bandpass', freqmin=F1, freqmax=F2, corners=4)\nst.trim(t1, t2)\nfig = plt.figure(figsize=(12, 8))\nplt.suptitle(LABEL)\nax = plt.subplot(121)\ndist = locations2degrees(EVT_LAT, EVT_LON, STA_LAT, STA_LON)\nmodel = TauPyModel(model=MODEL)\nax.plot(st[0].times(reftime=orig_time), st[0].data * 1000, linewidth=0.2,\n color='darkred')\nymin, ymax = ax.get_ylim()\nfor phase in PHASES:\n phase = [phase]\n tt = model.get_travel_times(source_depth_in_km=EVT_Z,\n distance_in_degree=dist, phase_list=phase)\n ax.vlines(tt[0].time, ymin, ymax, color='blue', linewidth=1.2, zorder=3,\n linestyle='--', alpha=0.5)\n ax.text(tt[0].time * 1.02, ymax, phase[0], fontsize=12,\n horizontalalignment='left', verticalalignment='top')\nax.set_xlabel('Time after earthquake (s)')\nax.set_title(\"\"\"{:}.{:}.{:}.{:}\nBandpass filter: {:}-{:} Hz\"\"\".format(st[0]\n .stats.network, st[0].stats.station, st[0].stats.location, st[0].stats.\n channel, F1, F2))\nax.set_ylabel('Ground velocity (mm/s)')\nax2 = plt.subplot(122, projection='polar')\narrivals = model.get_ray_paths(source_depth_in_km=EVT_Z, distance_in_degree\n =dist, phase_list=PHASES)\nax3 = arrivals.plot_rays(phase, legend=False, ax=ax2, show=False,\n label_arrivals=True)\nax3.set_title('Epicentral distance: {:3.1f}$^\\\\circ$'.format(dist))\nplt.tight_layout(rect=[0, 0.03, 1, 0.95])\nplt.savefig('traces.png')\nplt.show()\n",
"step-5": "#!/usr/bin/env python\n\"\"\"\nScript to download and plot RaspberryShake station data\nAlso computes and plots theoretical phase arrival times and raypaths.\n\nSee https://docs.obspy.org/packages/obspy.taup.html for more info on \nEarth models and phase-nmaing nomenclature.\n\nStephen Hicks\nImperial College London\nFeb 2020\n\"\"\"\n\nfrom obspy.clients.fdsn import Client\nfrom obspy import UTCDateTime\nfrom obspy.taup import TauPyModel\nfrom obspy.geodetics.base import locations2degrees\nimport matplotlib.pyplot as plt\n\n# Start of parameters to define\nNETWORK = \"AM\" # AM = RaspberryShake network\nSTATION = \"RAEBE\" # Station code of station to get data for\nCHANNEL = \"EHZ\" # channel to grab data for (e.g. EHZ, SHZ, EHE, EHN)\nEQ_TIME = \"2020-01-07T08:24:26\" # origin time of earthquake\nT_START = 0 # Length in seconds of data to plot before origin time\nT_END = 1250 # Length in seconds of data to plot after origin time\nPHASES = [\"P\", \"S\"] # list of phases to compute theoretical times for\nEVT_LAT = 17.916 # Latitude of event\nEVT_LON = -66.813 # Longitude of event \nEVT_Z = 10 # Depth of event\nSTA_LAT = 51.33 # Latitude of station \nSTA_LON = -0.49 # Longitude of station\nF1 = 0.3 # High-pass filter corner\nF2 = 0.7 # Low-pass filter corner \nLABEL = \"M 6.4 Puerto Rico\" # Title to plot on figure\nMODEL = 'iasp91' # Velocity model to predict travel-times through\n# End of parameters to define\n\n# Define fdsn client to get data from\nclient = Client('http://fdsnws.raspberryshakedata.com')\n\n# Define start and end time\norig_time = UTCDateTime(EQ_TIME)\nt1 = orig_time - T_START\nt2 = orig_time + T_END\n# Download and filfter data\nst = client.get_waveforms(NETWORK, STATION, \"00\", CHANNEL,\n starttime=t1, endtime=t2, attach_response=True)\nst.merge()\nst.detrend(type=\"demean\")\nst.remove_response()\nst.filter(\"bandpass\", freqmin=F1, freqmax=F2, corners=4)\nst.trim(t1, t2)\n\n# Set-up figure\nfig = plt.figure(figsize=(12, 8))\nplt.suptitle(LABEL)\nax = plt.subplot(121)\n\n# Set-up taup travel-time model\ndist = locations2degrees(EVT_LAT, EVT_LON, STA_LAT, STA_LON)\nmodel = TauPyModel(model=MODEL)\n\n# Now plot the waveform data\nax.plot(st[0].times(reftime=orig_time), st[0].data*1000, linewidth=0.2,\n color=\"darkred\")\nymin, ymax = ax.get_ylim()\n\n# Now plot the theoretical arrival times\nfor phase in PHASES:\n phase = [phase]\n tt = model.get_travel_times(source_depth_in_km=EVT_Z,\n distance_in_degree=dist,\n phase_list=phase)\n ax.vlines(tt[0].time, ymin, ymax, color=\"blue\",\n linewidth=1.2, zorder=3, linestyle=\"--\", alpha=0.5)\n ax.text(tt[0].time*1.02, ymax, phase[0], fontsize=12,\n horizontalalignment=\"left\", verticalalignment=\"top\")\nax.set_xlabel(\"Time after earthquake (s)\")\nax.set_title(\"{:}.{:}.{:}.{:}\\nBandpass filter: {:}-{:} Hz\".format(\n st[0].stats.network, st[0].stats.station, st[0].stats.location,\n st[0].stats.channel, F1, F2))\nax.set_ylabel(\"Ground velocity (mm/s)\")\n\n# Now plot the raypaths through the Earth\nax2 = plt.subplot(122, projection='polar')\narrivals = model.get_ray_paths(\n source_depth_in_km=EVT_Z, distance_in_degree=dist,\n phase_list=PHASES)\nax3 = arrivals.plot_rays(phase, legend=False, ax=ax2, show=False,\n label_arrivals=True)\nax3.set_title(\"Epicentral distance: {:3.1f}$^\\circ$\".format(dist))\n\n# Save and plot the figure\nplt.tight_layout(rect=[0, 0.03, 1, 0.95])\nplt.savefig(\"traces.png\")\nplt.show()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def add_sub_path(yaml_path):
file = open(yaml_path, 'r', encoding='utf-8')
file_data = file.read()
file.close()
data = yaml.safe_load(file_data)
for p, p_info in data.get('paths', {}).items():
for method, m_info in p_info.items():
url_path = m_info['x-bk-apigateway-resource']['backend']['path']
m_info['x-bk-apigateway-resource']['backend']['path'
] = '{}{}'.format('/{env.api_sub_path}', url_path[0:])
file = open(yaml_path, 'w')
yaml.dump(data, file)
file.close()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def add_sub_path(yaml_path):
file = open(yaml_path, 'r', encoding='utf-8')
file_data = file.read()
file.close()
data = yaml.safe_load(file_data)
for p, p_info in data.get('paths', {}).items():
for method, m_info in p_info.items():
url_path = m_info['x-bk-apigateway-resource']['backend']['path']
m_info['x-bk-apigateway-resource']['backend']['path'
] = '{}{}'.format('/{env.api_sub_path}', url_path[0:])
file = open(yaml_path, 'w')
yaml.dump(data, file)
file.close()
if __name__ == '__main__':
path = sys.argv[1]
add_sub_path(path)
<|reserved_special_token_1|>
import sys
import yaml
def add_sub_path(yaml_path):
file = open(yaml_path, 'r', encoding='utf-8')
file_data = file.read()
file.close()
data = yaml.safe_load(file_data)
for p, p_info in data.get('paths', {}).items():
for method, m_info in p_info.items():
url_path = m_info['x-bk-apigateway-resource']['backend']['path']
m_info['x-bk-apigateway-resource']['backend']['path'
] = '{}{}'.format('/{env.api_sub_path}', url_path[0:])
file = open(yaml_path, 'w')
yaml.dump(data, file)
file.close()
if __name__ == '__main__':
path = sys.argv[1]
add_sub_path(path)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
import sys
import yaml
def add_sub_path(yaml_path):
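    # Read the gateway YAML definition, prepend the "/{env.api_sub_path}" placeholder
    # to every backend path, and write the updated document back to the same file.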
file = open(yaml_path, "r", encoding="utf-8")
file_data = file.read()
file.close()
data = yaml.safe_load(file_data)
for p, p_info in data.get("paths", {}).items():
for method, m_info in p_info.items():
url_path = m_info["x-bk-apigateway-resource"]["backend"]["path"]
m_info["x-bk-apigateway-resource"]["backend"]["path"] = "{}{}".format(
"/{env.api_sub_path}", url_path[0:]
)
file = open(yaml_path, "w")
yaml.dump(data, file)
file.close()
if __name__ == "__main__":
    # Add the env.api_sub_path prefix to every path
path = sys.argv[1]
add_sub_path(path)
|
flexible
|
{
"blob_id": "bbd50c40bc0897fe7a93f277bcfdcba3ba6d6f2a",
"index": 1531,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef add_sub_path(yaml_path):\n file = open(yaml_path, 'r', encoding='utf-8')\n file_data = file.read()\n file.close()\n data = yaml.safe_load(file_data)\n for p, p_info in data.get('paths', {}).items():\n for method, m_info in p_info.items():\n url_path = m_info['x-bk-apigateway-resource']['backend']['path']\n m_info['x-bk-apigateway-resource']['backend']['path'\n ] = '{}{}'.format('/{env.api_sub_path}', url_path[0:])\n file = open(yaml_path, 'w')\n yaml.dump(data, file)\n file.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef add_sub_path(yaml_path):\n file = open(yaml_path, 'r', encoding='utf-8')\n file_data = file.read()\n file.close()\n data = yaml.safe_load(file_data)\n for p, p_info in data.get('paths', {}).items():\n for method, m_info in p_info.items():\n url_path = m_info['x-bk-apigateway-resource']['backend']['path']\n m_info['x-bk-apigateway-resource']['backend']['path'\n ] = '{}{}'.format('/{env.api_sub_path}', url_path[0:])\n file = open(yaml_path, 'w')\n yaml.dump(data, file)\n file.close()\n\n\nif __name__ == '__main__':\n path = sys.argv[1]\n add_sub_path(path)\n",
"step-4": "import sys\nimport yaml\n\n\ndef add_sub_path(yaml_path):\n file = open(yaml_path, 'r', encoding='utf-8')\n file_data = file.read()\n file.close()\n data = yaml.safe_load(file_data)\n for p, p_info in data.get('paths', {}).items():\n for method, m_info in p_info.items():\n url_path = m_info['x-bk-apigateway-resource']['backend']['path']\n m_info['x-bk-apigateway-resource']['backend']['path'\n ] = '{}{}'.format('/{env.api_sub_path}', url_path[0:])\n file = open(yaml_path, 'w')\n yaml.dump(data, file)\n file.close()\n\n\nif __name__ == '__main__':\n path = sys.argv[1]\n add_sub_path(path)\n",
"step-5": "# -*- coding: utf-8 -*-\nimport sys\nimport yaml\n\n\ndef add_sub_path(yaml_path):\n file = open(yaml_path, \"r\", encoding=\"utf-8\")\n file_data = file.read()\n file.close()\n\n data = yaml.safe_load(file_data)\n\n for p, p_info in data.get(\"paths\", {}).items():\n for method, m_info in p_info.items():\n url_path = m_info[\"x-bk-apigateway-resource\"][\"backend\"][\"path\"]\n m_info[\"x-bk-apigateway-resource\"][\"backend\"][\"path\"] = \"{}{}\".format(\n \"/{env.api_sub_path}\", url_path[0:]\n )\n\n file = open(yaml_path, \"w\")\n yaml.dump(data, file)\n file.close()\n\n\nif __name__ == \"__main__\":\n # 为所有path添加env.api_sub_path前缀\n path = sys.argv[1]\n add_sub_path(path)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import turtle
def draw_square():
conrad = turtle.Turtle()
conrad.shape("turtle")
conrad.color("red")
conrad.speed(3)
i = 0
while(i < 4):
conrad.forward(200)
conrad.right(90)
i += 1
def draw_circle():
niki = turtle.Turtle()
niki.circle(50)
def draw_triangle():
tri = turtle.Turtle()
tri.shape("turtle")
i = 0
while(i < 3):
tri.forward(135)
tri.right(145)
i += 1
def main():
window = turtle.Screen()
window.bgcolor("blue")
draw_square()
draw_circle()
draw_triangle()
window.exitonclick()
main()
|
normal
|
{
"blob_id": "9a982e0ab7fff882767a98ed01f5ed68bd710888",
"index": 7433,
"step-1": "<mask token>\n\n\ndef draw_circle():\n niki = turtle.Turtle()\n niki.circle(50)\n\n\ndef draw_triangle():\n tri = turtle.Turtle()\n tri.shape('turtle')\n i = 0\n while i < 3:\n tri.forward(135)\n tri.right(145)\n i += 1\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef draw_square():\n conrad = turtle.Turtle()\n conrad.shape('turtle')\n conrad.color('red')\n conrad.speed(3)\n i = 0\n while i < 4:\n conrad.forward(200)\n conrad.right(90)\n i += 1\n\n\ndef draw_circle():\n niki = turtle.Turtle()\n niki.circle(50)\n\n\ndef draw_triangle():\n tri = turtle.Turtle()\n tri.shape('turtle')\n i = 0\n while i < 3:\n tri.forward(135)\n tri.right(145)\n i += 1\n\n\ndef main():\n window = turtle.Screen()\n window.bgcolor('blue')\n draw_square()\n draw_circle()\n draw_triangle()\n window.exitonclick()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef draw_square():\n conrad = turtle.Turtle()\n conrad.shape('turtle')\n conrad.color('red')\n conrad.speed(3)\n i = 0\n while i < 4:\n conrad.forward(200)\n conrad.right(90)\n i += 1\n\n\ndef draw_circle():\n niki = turtle.Turtle()\n niki.circle(50)\n\n\ndef draw_triangle():\n tri = turtle.Turtle()\n tri.shape('turtle')\n i = 0\n while i < 3:\n tri.forward(135)\n tri.right(145)\n i += 1\n\n\ndef main():\n window = turtle.Screen()\n window.bgcolor('blue')\n draw_square()\n draw_circle()\n draw_triangle()\n window.exitonclick()\n\n\nmain()\n",
"step-4": "import turtle\n\n\ndef draw_square():\n conrad = turtle.Turtle()\n conrad.shape('turtle')\n conrad.color('red')\n conrad.speed(3)\n i = 0\n while i < 4:\n conrad.forward(200)\n conrad.right(90)\n i += 1\n\n\ndef draw_circle():\n niki = turtle.Turtle()\n niki.circle(50)\n\n\ndef draw_triangle():\n tri = turtle.Turtle()\n tri.shape('turtle')\n i = 0\n while i < 3:\n tri.forward(135)\n tri.right(145)\n i += 1\n\n\ndef main():\n window = turtle.Screen()\n window.bgcolor('blue')\n draw_square()\n draw_circle()\n draw_triangle()\n window.exitonclick()\n\n\nmain()\n",
"step-5": "import turtle\n\ndef draw_square():\n\t\n\tconrad = turtle.Turtle()\n\tconrad.shape(\"turtle\")\n\tconrad.color(\"red\")\n\tconrad.speed(3)\n\n\ti = 0\n\twhile(i < 4):\n\t\tconrad.forward(200)\n\t\tconrad.right(90)\n\t\ti += 1\n\ndef draw_circle():\n\t\n\tniki = turtle.Turtle()\n\tniki.circle(50)\n\ndef draw_triangle():\n\t\n\ttri = turtle.Turtle()\n\ttri.shape(\"turtle\")\n\n\ti = 0\n\twhile(i < 3):\n\t\ttri.forward(135)\n\t\ttri.right(145)\n\t\ti += 1\n\ndef main():\n\twindow = turtle.Screen()\n\twindow.bgcolor(\"blue\")\n\t\n\tdraw_square()\n\tdraw_circle()\n\tdraw_triangle()\n\twindow.exitonclick()\n\nmain()",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
# def add(a,b):
# x = a + b
#
# # the return value gets assigned to the "result" variable
# result = add(3,5)
# print result # this should print 8
#
# def multiply(arr,num):
# for x in range(len(arr)):
# arr[x] *= num
# return arr
#
# a = [2,4,10,16]
# b = multiply(a,5)
# print b
#
#
# dog = ("Canis Familiaris", "dog", "carnivore", 12)
# dog = dog + ("domestic",)
# dog = dog[:3] + ("man's best friend",) + dog[4:]
# print dog
# print sorted(dog)
#
# import math
#
# def get_circle_area(r):
# #Return (circumference, area) of a circle of radius r
# c = 2 * math.pi * r
# a = math.pi * r * r
# return (c, a)
#
# print get_circle_area(5)
#
# weekend = {"Sun": "Sunday", "Mon": "Monday"}
# print weekend.values()
# context = {
# 'questions': [
# { 'id': 1, 'content': 'Why is there a light in the fridge and not in the freezer?'},
# { 'id': 2, 'content': 'Why don\'t sheep shrink when it rains?'},
# { 'id': 3, 'content': 'Why are they called apartments when they are all stuck together?'},
# { 'id': 4, 'content': 'Why do cars drive on the parkway and park on the driveway?'}
# ]
# }
#
# for key, data in context.items():
# #print data
# for value in data:
# print "Question #", value["id"], ": ", value["content"]
# print "----"
# data = {"house":"Haus","cat":"Katze","red":"rot"}
# print data.values()
dishes = ["pizza", "sauerkraut", "paella", "hamburger"]
countries = ["Italy", "Germany", "Spain", "USA"]
country_specialties = zip(countries, dishes)
# print country_specialties
country_specialties_dict = dict(country_specialties)
print country_specialties_dict
|
normal
|
{
"blob_id": "e24c3f6ce2e65305f955dcede9edc0b497f6e74c",
"index": 2880,
"step-1": "# def add(a,b):\n# x = a + b\n#\n# # the return value gets assigned to the \"result\" variable\n# result = add(3,5)\n# print result # this should print 8\n#\n# def multiply(arr,num):\n# for x in range(len(arr)):\n# arr[x] *= num\n# return arr\n#\n# a = [2,4,10,16]\n# b = multiply(a,5)\n# print b\n#\n#\n# dog = (\"Canis Familiaris\", \"dog\", \"carnivore\", 12)\n# dog = dog + (\"domestic\",)\n# dog = dog[:3] + (\"man's best friend\",) + dog[4:]\n# print dog\n# print sorted(dog)\n#\n# import math\n#\n# def get_circle_area(r):\n# #Return (circumference, area) of a circle of radius r\n# c = 2 * math.pi * r\n# a = math.pi * r * r\n# return (c, a)\n#\n# print get_circle_area(5)\n#\n# weekend = {\"Sun\": \"Sunday\", \"Mon\": \"Monday\"}\n# print weekend.values()\n\n# context = {\n# 'questions': [\n# { 'id': 1, 'content': 'Why is there a light in the fridge and not in the freezer?'},\n# { 'id': 2, 'content': 'Why don\\'t sheep shrink when it rains?'},\n# { 'id': 3, 'content': 'Why are they called apartments when they are all stuck together?'},\n# { 'id': 4, 'content': 'Why do cars drive on the parkway and park on the driveway?'}\n# ]\n# }\n#\n# for key, data in context.items():\n# #print data\n# for value in data:\n# print \"Question #\", value[\"id\"], \": \", value[\"content\"]\n# print \"----\"\n\n# data = {\"house\":\"Haus\",\"cat\":\"Katze\",\"red\":\"rot\"}\n# print data.values()\n\ndishes = [\"pizza\", \"sauerkraut\", \"paella\", \"hamburger\"]\ncountries = [\"Italy\", \"Germany\", \"Spain\", \"USA\"]\n\ncountry_specialties = zip(countries, dishes)\n# print country_specialties\ncountry_specialties_dict = dict(country_specialties)\nprint country_specialties_dict\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
def _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,
scale, vol, **kwargs):
assert duration is not None
if subclip is not None:
if isinstance(subclip, (int, float)):
clip = clip.subclip(subclip).set_duration(duration)
else:
subclip_duration = subclip[1] - subclip[0]
if duration > subclip_duration:
c1 = clip.subclip(subclip[0], subclip[1])
c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -
subclip_duration)
clip = concatenate_videoclips([c1, c2])
if clip.audio is not None:
clip = clip.set_audio(clip.audio.set_fps(44100))
else:
clip = clip.subclip(subclip[0], subclip[1]).set_duration(
duration)
if speed is not None:
clip = clip.fx(vfx.speedx, speed)
if frame is not None:
clip = clip.to_ImageClip(frame).set_duration(duration)
if loop:
clip = clip.fx(vfx.loop)
if subclip is None:
clip = clip.set_duration(duration)
if pos is not None:
if pos == 'center':
clip = clip.set_position(('center', 'center'))
elif isinstance(pos, (list, tuple)):
pos = list(pos)
half_size = [(x // 2) for x in clip.size]
for i in range(2):
if isinstance(pos[i], (int, float)):
pos[i] = pos[i] - half_size[i]
pos[i] = int(coreapi.global_scale * pos[i])
clip = clip.set_position(pos)
else:
clip = clip.set_position(pos)
if scale[0] != 1.0 or scale[1] != 1.0:
clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))
return clip
def _update_clip_duration(track):
def is_connected(prev_clip, cur_clip):
return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.
start, rel_tol=0.001)
prev_clip_info = None
for clip_info in track:
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
prev_clip_info.duration = (clip_info.start - prev_clip_info
.start)
prev_clip_info.auto_extend = False
assert prev_clip_info.duration > 0
if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info
, clip_info):
prev_clip_info.fadeout = prev_clip_info.crossfade
prev_clip_info = clip_info
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
duration = prev_clip_info.duration
if 're' in coreapi.pos_dict:
duration = max(duration, coreapi.pos_dict['re'] - clip_info
.start)
prev_clip_info.duration = duration
prev_clip_info.auto_extend = False
if prev_clip_info.crossfade > 0:
prev_clip_info.fadeout = prev_clip_info.crossfade
def _export_video(*, resolution, audio_only):
resolution = [int(x * coreapi.global_scale) for x in resolution]
audio_clips = []
for track in datastruct.video_tracks.values():
_update_clip_duration(track)
video_clips = []
for track_name, track in datastruct.video_tracks.items():
for i, clip_info in enumerate(track):
assert clip_info.mpy_clip is not None
assert clip_info.duration is not None
if clip_info.no_audio:
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
elif clip_info.mpy_clip.audio is not None:
audio_clip = clip_info.mpy_clip.audio
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
if clip_info.subclip is not None:
duration = clip_info.subclip[1] - clip_info.subclip[0]
audio_clip = audio_clip.subclip(clip_info.subclip[0],
clip_info.subclip[1])
else:
duration = clip_info.duration
duration = min(duration, audio_clip.duration)
audio_clip = audio_clip.set_duration(duration)
audio_clip = audio_clip.set_start(clip_info.start)
if clip_info.norm:
audio_clip = audio_clip.fx(afx.audio_normalize)
if clip_info.vol is not None:
if isinstance(clip_info.vol, (int, float)):
audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)
else:
audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,
clip_info.vol)
audio_clips.append(audio_clip)
crossfade_duration = track[i + 1].crossfade if i < len(track
) - 1 else 0
if crossfade_duration:
clip_info.duration += crossfade_duration
clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **
vars(clip_info))
if clip_info.fadein:
assert isinstance(clip_info.fadein, (int, float))
if track_name != 'vid':
clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(
clip_info.fadein)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,
clip_info.fadein)
elif clip_info.crossfade > 0:
video_clips.append(clip_info.mpy_clip.set_duration(
clip_info.crossfade).crossfadein(clip_info.crossfade).
set_start(clip_info.start))
clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.
crossfade)
clip_info.start += clip_info.crossfade
if clip_info.fadeout:
assert isinstance(clip_info.fadeout, (int, float))
if track_name != 'vid':
clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(
clip_info.fadeout)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,
clip_info.fadeout)
video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))
if len(video_clips) == 0:
video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).
set_duration(2))
final_clip = CompositeVideoClip(video_clips, size=resolution)
for _, track in datastruct.audio_tracks.items():
clips = []
for clip_info in track.clips:
if clip_info.loop:
clip = AudioFileClip(clip_info.file, buffersize=400000)
else:
clip = clip_info.mpy_clip
if clip_info.subclip is not None:
clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])
duration = clip_info.duration
if duration is not None:
if clip_info.loop:
clip = clip.fx(afx.audio_loop, duration=duration)
else:
duration = min(duration, clip.duration)
if clip_info.subclip:
duration = min(duration, clip_info.subclip[1] -
clip_info.subclip[0])
clip = clip.set_duration(duration)
if clip_info.start is not None:
clip = clip.set_start(clip_info.start)
if len(clip_info.vol_keypoints) > 0:
clip = _adjust_mpy_audio_clip_volume(clip, clip_info.
vol_keypoints)
clips.append(clip)
if len(clips) > 0:
clip = CompositeAudioClip(clips)
audio_clips.append(clip)
if final_clip.audio:
audio_clips.append(final_clip.audio)
if len(audio_clips) > 0:
final_audio_clip = CompositeAudioClip(audio_clips)
final_clip = final_clip.set_audio(final_audio_clip)
os.makedirs('tmp/out', exist_ok=True)
if audio_only:
final_audio_clip.fps = 44100
final_audio_clip.write_audiofile('%s.mp3' % out_filename)
open_with('%s.mp3' % out_filename, program_id=0)
else:
final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=
'%s.mp3' % out_filename, remove_temp=False, codec='libx264',
threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])
subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',
f'{out_filename}.mp4'], close_fds=True)
<|reserved_special_token_0|>
def _convert_to_readable_time(seconds):
seconds = int(seconds)
seconds = seconds % (24 * 3600)
hour = seconds // 3600
seconds %= 3600
minutes = seconds // 60
seconds %= 60
if hour > 0:
return '%d:%02d:%02d' % (hour, minutes, seconds)
else:
return '%02d:%02d' % (minutes, seconds)
<|reserved_special_token_0|>
@core.api
def include(file):
with open(file, 'r', encoding='utf-8') as f:
s = f.read()
cwd = os.getcwd()
os.chdir(os.path.dirname(os.path.abspath(file)))
_parse_text(s)
os.chdir(cwd)
def _remove_unused_recordings(s):
used_recordings = set()
unused_recordings = []
apis = {'record': lambda f, **kargs: used_recordings.add(f)}
_parse_text(s, apis=apis)
files = [f for f in glob.glob('record/*') if os.path.isfile(f)]
files = [f.replace('\\', '/') for f in files]
for f in files:
if f not in used_recordings:
unused_recordings.append(f)
print2('Used : %d' % len(used_recordings), color='green')
print2('Unused : %d' % len(unused_recordings), color='red')
assert len(used_recordings) + len(unused_recordings) == len(files)
print('Press y to clean up: ', end='', flush=True)
if getch() == 'y':
for f in unused_recordings:
try:
os.remove(f)
except:
print('WARNING: failed to remove: %s' % f)
def _parse_text(text, apis=core.apis, **kwargs):
def find_next(text, needle, p):
pos = text.find(needle, p)
if pos < 0:
pos = len(text)
return pos
text = re.sub('<!--[\\d\\D]*?-->', '', text)
p = 0
while p < len(text):
if text[p:p + 2] == '{{':
end = find_next(text, '}}', p)
python_code = text[p + 2:end].strip()
p = end + 2
if ignore_undefined:
try:
exec(python_code, apis)
except NameError:
pass
else:
exec(python_code, apis)
continue
if text[p:p + 1] == '#':
end = find_next(text, '\n', p)
line = text[p:end].strip()
_write_timestamp(coreapi.pos_dict['a'], line)
p = end + 1
continue
match = re.match('---((?:[0-9]*[.])?[0-9]+)?\n', text[p:])
if match is not None:
if match.group(1) is not None:
coreapi.audio_gap(float(match.group(1)))
else:
coreapi.audio_gap(0.2)
p += match.end(0) + 1
continue
end = find_next(text, '\n', p)
line = text[p:end].strip()
p = end + 1
if line != '' and 'parse_line' in apis:
apis['parse_line'](line)
core.on_api_func(None)
<|reserved_special_token_0|>
def load_config():
import yaml
CONFIG_FILE = 'config.yaml'
DEFAULT_CONFIG = {'fps': 30}
if os.path.exists(CONFIG_FILE):
with open(CONFIG_FILE, 'r') as f:
config = yaml.load(f.read(), Loader=yaml.FullLoader)
else:
with open(CONFIG_FILE, 'w', newline='\n') as f:
yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)
config = DEFAULT_CONFIG
coreapi.fps(config['fps'])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,
scale, vol, **kwargs):
assert duration is not None
if subclip is not None:
if isinstance(subclip, (int, float)):
clip = clip.subclip(subclip).set_duration(duration)
else:
subclip_duration = subclip[1] - subclip[0]
if duration > subclip_duration:
c1 = clip.subclip(subclip[0], subclip[1])
c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -
subclip_duration)
clip = concatenate_videoclips([c1, c2])
if clip.audio is not None:
clip = clip.set_audio(clip.audio.set_fps(44100))
else:
clip = clip.subclip(subclip[0], subclip[1]).set_duration(
duration)
if speed is not None:
clip = clip.fx(vfx.speedx, speed)
if frame is not None:
clip = clip.to_ImageClip(frame).set_duration(duration)
if loop:
clip = clip.fx(vfx.loop)
if subclip is None:
clip = clip.set_duration(duration)
if pos is not None:
if pos == 'center':
clip = clip.set_position(('center', 'center'))
elif isinstance(pos, (list, tuple)):
pos = list(pos)
half_size = [(x // 2) for x in clip.size]
for i in range(2):
if isinstance(pos[i], (int, float)):
pos[i] = pos[i] - half_size[i]
pos[i] = int(coreapi.global_scale * pos[i])
clip = clip.set_position(pos)
else:
clip = clip.set_position(pos)
if scale[0] != 1.0 or scale[1] != 1.0:
clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))
return clip
def _update_clip_duration(track):
def is_connected(prev_clip, cur_clip):
return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.
start, rel_tol=0.001)
prev_clip_info = None
for clip_info in track:
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
prev_clip_info.duration = (clip_info.start - prev_clip_info
.start)
prev_clip_info.auto_extend = False
assert prev_clip_info.duration > 0
if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info
, clip_info):
prev_clip_info.fadeout = prev_clip_info.crossfade
prev_clip_info = clip_info
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
duration = prev_clip_info.duration
if 're' in coreapi.pos_dict:
duration = max(duration, coreapi.pos_dict['re'] - clip_info
.start)
prev_clip_info.duration = duration
prev_clip_info.auto_extend = False
if prev_clip_info.crossfade > 0:
prev_clip_info.fadeout = prev_clip_info.crossfade
def _export_video(*, resolution, audio_only):
resolution = [int(x * coreapi.global_scale) for x in resolution]
audio_clips = []
for track in datastruct.video_tracks.values():
_update_clip_duration(track)
video_clips = []
for track_name, track in datastruct.video_tracks.items():
for i, clip_info in enumerate(track):
assert clip_info.mpy_clip is not None
assert clip_info.duration is not None
if clip_info.no_audio:
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
elif clip_info.mpy_clip.audio is not None:
audio_clip = clip_info.mpy_clip.audio
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
if clip_info.subclip is not None:
duration = clip_info.subclip[1] - clip_info.subclip[0]
audio_clip = audio_clip.subclip(clip_info.subclip[0],
clip_info.subclip[1])
else:
duration = clip_info.duration
duration = min(duration, audio_clip.duration)
audio_clip = audio_clip.set_duration(duration)
audio_clip = audio_clip.set_start(clip_info.start)
if clip_info.norm:
audio_clip = audio_clip.fx(afx.audio_normalize)
if clip_info.vol is not None:
if isinstance(clip_info.vol, (int, float)):
audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)
else:
audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,
clip_info.vol)
audio_clips.append(audio_clip)
crossfade_duration = track[i + 1].crossfade if i < len(track
) - 1 else 0
if crossfade_duration:
clip_info.duration += crossfade_duration
clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **
vars(clip_info))
if clip_info.fadein:
assert isinstance(clip_info.fadein, (int, float))
if track_name != 'vid':
clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(
clip_info.fadein)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,
clip_info.fadein)
elif clip_info.crossfade > 0:
video_clips.append(clip_info.mpy_clip.set_duration(
clip_info.crossfade).crossfadein(clip_info.crossfade).
set_start(clip_info.start))
clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.
crossfade)
clip_info.start += clip_info.crossfade
if clip_info.fadeout:
assert isinstance(clip_info.fadeout, (int, float))
if track_name != 'vid':
clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(
clip_info.fadeout)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,
clip_info.fadeout)
video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))
if len(video_clips) == 0:
video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).
set_duration(2))
final_clip = CompositeVideoClip(video_clips, size=resolution)
for _, track in datastruct.audio_tracks.items():
clips = []
for clip_info in track.clips:
if clip_info.loop:
clip = AudioFileClip(clip_info.file, buffersize=400000)
else:
clip = clip_info.mpy_clip
if clip_info.subclip is not None:
clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])
duration = clip_info.duration
if duration is not None:
if clip_info.loop:
clip = clip.fx(afx.audio_loop, duration=duration)
else:
duration = min(duration, clip.duration)
if clip_info.subclip:
duration = min(duration, clip_info.subclip[1] -
clip_info.subclip[0])
clip = clip.set_duration(duration)
if clip_info.start is not None:
clip = clip.set_start(clip_info.start)
if len(clip_info.vol_keypoints) > 0:
clip = _adjust_mpy_audio_clip_volume(clip, clip_info.
vol_keypoints)
clips.append(clip)
if len(clips) > 0:
clip = CompositeAudioClip(clips)
audio_clips.append(clip)
if final_clip.audio:
audio_clips.append(final_clip.audio)
if len(audio_clips) > 0:
final_audio_clip = CompositeAudioClip(audio_clips)
final_clip = final_clip.set_audio(final_audio_clip)
os.makedirs('tmp/out', exist_ok=True)
if audio_only:
final_audio_clip.fps = 44100
final_audio_clip.write_audiofile('%s.mp3' % out_filename)
open_with('%s.mp3' % out_filename, program_id=0)
else:
final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=
'%s.mp3' % out_filename, remove_temp=False, codec='libx264',
threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])
subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',
f'{out_filename}.mp4'], close_fds=True)
def _adjust_mpy_audio_clip_volume(clip, vol_keypoints):
xp = []
fp = []
print('vol_keypoints:', vol_keypoints)
for p, vol in vol_keypoints:
if isinstance(vol, (int, float)):
xp.append(p)
fp.append(vol)
else:
            raise Exception('unsupported bgm parameter type: %s' % type(vol))
def volume_adjust(gf, t):
factor = np.interp(t, xp, fp)
factor = np.vstack([factor, factor]).T
return factor * gf(t)
return clip.fl(volume_adjust)
def _convert_to_readable_time(seconds):
seconds = int(seconds)
seconds = seconds % (24 * 3600)
hour = seconds // 3600
seconds %= 3600
minutes = seconds // 60
seconds %= 60
if hour > 0:
return '%d:%02d:%02d' % (hour, minutes, seconds)
else:
return '%02d:%02d' % (minutes, seconds)
<|reserved_special_token_0|>
@core.api
def include(file):
with open(file, 'r', encoding='utf-8') as f:
s = f.read()
cwd = os.getcwd()
os.chdir(os.path.dirname(os.path.abspath(file)))
_parse_text(s)
os.chdir(cwd)
def _remove_unused_recordings(s):
used_recordings = set()
unused_recordings = []
apis = {'record': lambda f, **kargs: used_recordings.add(f)}
_parse_text(s, apis=apis)
files = [f for f in glob.glob('record/*') if os.path.isfile(f)]
files = [f.replace('\\', '/') for f in files]
for f in files:
if f not in used_recordings:
unused_recordings.append(f)
print2('Used : %d' % len(used_recordings), color='green')
print2('Unused : %d' % len(unused_recordings), color='red')
assert len(used_recordings) + len(unused_recordings) == len(files)
print('Press y to clean up: ', end='', flush=True)
if getch() == 'y':
for f in unused_recordings:
try:
os.remove(f)
except:
print('WARNING: failed to remove: %s' % f)
def _parse_text(text, apis=core.apis, **kwargs):
def find_next(text, needle, p):
pos = text.find(needle, p)
if pos < 0:
pos = len(text)
return pos
text = re.sub('<!--[\\d\\D]*?-->', '', text)
p = 0
while p < len(text):
if text[p:p + 2] == '{{':
end = find_next(text, '}}', p)
python_code = text[p + 2:end].strip()
p = end + 2
if ignore_undefined:
try:
exec(python_code, apis)
except NameError:
pass
else:
exec(python_code, apis)
continue
if text[p:p + 1] == '#':
end = find_next(text, '\n', p)
line = text[p:end].strip()
_write_timestamp(coreapi.pos_dict['a'], line)
p = end + 1
continue
match = re.match('---((?:[0-9]*[.])?[0-9]+)?\n', text[p:])
if match is not None:
if match.group(1) is not None:
coreapi.audio_gap(float(match.group(1)))
else:
coreapi.audio_gap(0.2)
p += match.end(0) + 1
continue
end = find_next(text, '\n', p)
line = text[p:end].strip()
p = end + 1
if line != '' and 'parse_line' in apis:
apis['parse_line'](line)
core.on_api_func(None)
<|reserved_special_token_0|>
def load_config():
import yaml
CONFIG_FILE = 'config.yaml'
DEFAULT_CONFIG = {'fps': 30}
if os.path.exists(CONFIG_FILE):
with open(CONFIG_FILE, 'r') as f:
config = yaml.load(f.read(), Loader=yaml.FullLoader)
else:
with open(CONFIG_FILE, 'w', newline='\n') as f:
yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)
config = DEFAULT_CONFIG
coreapi.fps(config['fps'])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,
scale, vol, **kwargs):
assert duration is not None
if subclip is not None:
if isinstance(subclip, (int, float)):
clip = clip.subclip(subclip).set_duration(duration)
else:
subclip_duration = subclip[1] - subclip[0]
if duration > subclip_duration:
c1 = clip.subclip(subclip[0], subclip[1])
c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -
subclip_duration)
clip = concatenate_videoclips([c1, c2])
if clip.audio is not None:
clip = clip.set_audio(clip.audio.set_fps(44100))
else:
clip = clip.subclip(subclip[0], subclip[1]).set_duration(
duration)
if speed is not None:
clip = clip.fx(vfx.speedx, speed)
if frame is not None:
clip = clip.to_ImageClip(frame).set_duration(duration)
if loop:
clip = clip.fx(vfx.loop)
if subclip is None:
clip = clip.set_duration(duration)
if pos is not None:
if pos == 'center':
clip = clip.set_position(('center', 'center'))
elif isinstance(pos, (list, tuple)):
pos = list(pos)
half_size = [(x // 2) for x in clip.size]
for i in range(2):
if isinstance(pos[i], (int, float)):
pos[i] = pos[i] - half_size[i]
pos[i] = int(coreapi.global_scale * pos[i])
clip = clip.set_position(pos)
else:
clip = clip.set_position(pos)
if scale[0] != 1.0 or scale[1] != 1.0:
clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))
return clip
def _update_clip_duration(track):
def is_connected(prev_clip, cur_clip):
return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.
start, rel_tol=0.001)
prev_clip_info = None
for clip_info in track:
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
prev_clip_info.duration = (clip_info.start - prev_clip_info
.start)
prev_clip_info.auto_extend = False
assert prev_clip_info.duration > 0
if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info
, clip_info):
prev_clip_info.fadeout = prev_clip_info.crossfade
prev_clip_info = clip_info
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
duration = prev_clip_info.duration
if 're' in coreapi.pos_dict:
duration = max(duration, coreapi.pos_dict['re'] - clip_info
.start)
prev_clip_info.duration = duration
prev_clip_info.auto_extend = False
if prev_clip_info.crossfade > 0:
prev_clip_info.fadeout = prev_clip_info.crossfade
def _export_video(*, resolution, audio_only):
resolution = [int(x * coreapi.global_scale) for x in resolution]
audio_clips = []
for track in datastruct.video_tracks.values():
_update_clip_duration(track)
video_clips = []
for track_name, track in datastruct.video_tracks.items():
for i, clip_info in enumerate(track):
assert clip_info.mpy_clip is not None
assert clip_info.duration is not None
if clip_info.no_audio:
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
elif clip_info.mpy_clip.audio is not None:
audio_clip = clip_info.mpy_clip.audio
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
if clip_info.subclip is not None:
duration = clip_info.subclip[1] - clip_info.subclip[0]
audio_clip = audio_clip.subclip(clip_info.subclip[0],
clip_info.subclip[1])
else:
duration = clip_info.duration
duration = min(duration, audio_clip.duration)
audio_clip = audio_clip.set_duration(duration)
audio_clip = audio_clip.set_start(clip_info.start)
if clip_info.norm:
audio_clip = audio_clip.fx(afx.audio_normalize)
if clip_info.vol is not None:
if isinstance(clip_info.vol, (int, float)):
audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)
else:
audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,
clip_info.vol)
audio_clips.append(audio_clip)
crossfade_duration = track[i + 1].crossfade if i < len(track
) - 1 else 0
if crossfade_duration:
clip_info.duration += crossfade_duration
clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **
vars(clip_info))
if clip_info.fadein:
assert isinstance(clip_info.fadein, (int, float))
if track_name != 'vid':
clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(
clip_info.fadein)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,
clip_info.fadein)
elif clip_info.crossfade > 0:
video_clips.append(clip_info.mpy_clip.set_duration(
clip_info.crossfade).crossfadein(clip_info.crossfade).
set_start(clip_info.start))
clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.
crossfade)
clip_info.start += clip_info.crossfade
if clip_info.fadeout:
assert isinstance(clip_info.fadeout, (int, float))
if track_name != 'vid':
clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(
clip_info.fadeout)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,
clip_info.fadeout)
video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))
if len(video_clips) == 0:
video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).
set_duration(2))
final_clip = CompositeVideoClip(video_clips, size=resolution)
for _, track in datastruct.audio_tracks.items():
clips = []
for clip_info in track.clips:
if clip_info.loop:
clip = AudioFileClip(clip_info.file, buffersize=400000)
else:
clip = clip_info.mpy_clip
if clip_info.subclip is not None:
clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])
duration = clip_info.duration
if duration is not None:
if clip_info.loop:
clip = clip.fx(afx.audio_loop, duration=duration)
else:
duration = min(duration, clip.duration)
if clip_info.subclip:
duration = min(duration, clip_info.subclip[1] -
clip_info.subclip[0])
clip = clip.set_duration(duration)
if clip_info.start is not None:
clip = clip.set_start(clip_info.start)
if len(clip_info.vol_keypoints) > 0:
clip = _adjust_mpy_audio_clip_volume(clip, clip_info.
vol_keypoints)
clips.append(clip)
if len(clips) > 0:
clip = CompositeAudioClip(clips)
audio_clips.append(clip)
if final_clip.audio:
audio_clips.append(final_clip.audio)
if len(audio_clips) > 0:
final_audio_clip = CompositeAudioClip(audio_clips)
final_clip = final_clip.set_audio(final_audio_clip)
os.makedirs('tmp/out', exist_ok=True)
if audio_only:
final_audio_clip.fps = 44100
final_audio_clip.write_audiofile('%s.mp3' % out_filename)
open_with('%s.mp3' % out_filename, program_id=0)
else:
final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=
'%s.mp3' % out_filename, remove_temp=False, codec='libx264',
threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])
subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',
f'{out_filename}.mp4'], close_fds=True)
def _adjust_mpy_audio_clip_volume(clip, vol_keypoints):
xp = []
fp = []
print('vol_keypoints:', vol_keypoints)
for p, vol in vol_keypoints:
if isinstance(vol, (int, float)):
xp.append(p)
fp.append(vol)
else:
            raise Exception('unsupported bgm parameter type: %s' % type(vol))
def volume_adjust(gf, t):
factor = np.interp(t, xp, fp)
factor = np.vstack([factor, factor]).T
return factor * gf(t)
return clip.fl(volume_adjust)
def _convert_to_readable_time(seconds):
seconds = int(seconds)
seconds = seconds % (24 * 3600)
hour = seconds // 3600
seconds %= 3600
minutes = seconds // 60
seconds %= 60
if hour > 0:
return '%d:%02d:%02d' % (hour, minutes, seconds)
else:
return '%02d:%02d' % (minutes, seconds)
def _write_timestamp(t, section_name):
os.makedirs(os.path.dirname(out_filename), exist_ok=True)
if not hasattr(_write_timestamp, 'f'):
_write_timestamp.f = open('%s.txt' % out_filename, 'w', encoding=
'utf-8')
_write_timestamp.f.write('%s (%s)\n' % (section_name,
_convert_to_readable_time(t)))
_write_timestamp.f.flush()
@core.api
def include(file):
with open(file, 'r', encoding='utf-8') as f:
s = f.read()
cwd = os.getcwd()
os.chdir(os.path.dirname(os.path.abspath(file)))
_parse_text(s)
os.chdir(cwd)
def _remove_unused_recordings(s):
used_recordings = set()
unused_recordings = []
apis = {'record': lambda f, **kargs: used_recordings.add(f)}
_parse_text(s, apis=apis)
files = [f for f in glob.glob('record/*') if os.path.isfile(f)]
files = [f.replace('\\', '/') for f in files]
for f in files:
if f not in used_recordings:
unused_recordings.append(f)
print2('Used : %d' % len(used_recordings), color='green')
print2('Unused : %d' % len(unused_recordings), color='red')
assert len(used_recordings) + len(unused_recordings) == len(files)
print('Press y to clean up: ', end='', flush=True)
if getch() == 'y':
for f in unused_recordings:
try:
os.remove(f)
except:
print('WARNING: failed to remove: %s' % f)
def _parse_text(text, apis=core.apis, **kwargs):
def find_next(text, needle, p):
pos = text.find(needle, p)
if pos < 0:
pos = len(text)
return pos
text = re.sub('<!--[\\d\\D]*?-->', '', text)
p = 0
while p < len(text):
if text[p:p + 2] == '{{':
end = find_next(text, '}}', p)
python_code = text[p + 2:end].strip()
p = end + 2
if ignore_undefined:
try:
exec(python_code, apis)
except NameError:
pass
else:
exec(python_code, apis)
continue
if text[p:p + 1] == '#':
end = find_next(text, '\n', p)
line = text[p:end].strip()
_write_timestamp(coreapi.pos_dict['a'], line)
p = end + 1
continue
match = re.match('---((?:[0-9]*[.])?[0-9]+)?\n', text[p:])
if match is not None:
if match.group(1) is not None:
coreapi.audio_gap(float(match.group(1)))
else:
coreapi.audio_gap(0.2)
p += match.end(0) + 1
continue
end = find_next(text, '\n', p)
line = text[p:end].strip()
p = end + 1
if line != '' and 'parse_line' in apis:
apis['parse_line'](line)
core.on_api_func(None)
<|reserved_special_token_0|>
def load_config():
import yaml
CONFIG_FILE = 'config.yaml'
DEFAULT_CONFIG = {'fps': 30}
if os.path.exists(CONFIG_FILE):
with open(CONFIG_FILE, 'r') as f:
config = yaml.load(f.read(), Loader=yaml.FullLoader)
else:
with open(CONFIG_FILE, 'w', newline='\n') as f:
yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)
config = DEFAULT_CONFIG
coreapi.fps(config['fps'])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if 1:
change_settings({'FFMPEG_BINARY': get_executable('ffmpeg')})
def _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,
scale, vol, **kwargs):
assert duration is not None
if subclip is not None:
if isinstance(subclip, (int, float)):
clip = clip.subclip(subclip).set_duration(duration)
else:
subclip_duration = subclip[1] - subclip[0]
if duration > subclip_duration:
c1 = clip.subclip(subclip[0], subclip[1])
c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -
subclip_duration)
clip = concatenate_videoclips([c1, c2])
if clip.audio is not None:
clip = clip.set_audio(clip.audio.set_fps(44100))
else:
clip = clip.subclip(subclip[0], subclip[1]).set_duration(
duration)
if speed is not None:
clip = clip.fx(vfx.speedx, speed)
if frame is not None:
clip = clip.to_ImageClip(frame).set_duration(duration)
if loop:
clip = clip.fx(vfx.loop)
if subclip is None:
clip = clip.set_duration(duration)
if pos is not None:
if pos == 'center':
clip = clip.set_position(('center', 'center'))
elif isinstance(pos, (list, tuple)):
pos = list(pos)
half_size = [(x // 2) for x in clip.size]
for i in range(2):
if isinstance(pos[i], (int, float)):
pos[i] = pos[i] - half_size[i]
pos[i] = int(coreapi.global_scale * pos[i])
clip = clip.set_position(pos)
else:
clip = clip.set_position(pos)
if scale[0] != 1.0 or scale[1] != 1.0:
clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))
return clip
def _update_clip_duration(track):
def is_connected(prev_clip, cur_clip):
return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.
start, rel_tol=0.001)
prev_clip_info = None
for clip_info in track:
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
prev_clip_info.duration = (clip_info.start - prev_clip_info
.start)
prev_clip_info.auto_extend = False
assert prev_clip_info.duration > 0
if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info
, clip_info):
prev_clip_info.fadeout = prev_clip_info.crossfade
prev_clip_info = clip_info
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
duration = prev_clip_info.duration
if 're' in coreapi.pos_dict:
duration = max(duration, coreapi.pos_dict['re'] - clip_info
.start)
prev_clip_info.duration = duration
prev_clip_info.auto_extend = False
if prev_clip_info.crossfade > 0:
prev_clip_info.fadeout = prev_clip_info.crossfade
def _export_video(*, resolution, audio_only):
resolution = [int(x * coreapi.global_scale) for x in resolution]
audio_clips = []
for track in datastruct.video_tracks.values():
_update_clip_duration(track)
video_clips = []
for track_name, track in datastruct.video_tracks.items():
for i, clip_info in enumerate(track):
assert clip_info.mpy_clip is not None
assert clip_info.duration is not None
if clip_info.no_audio:
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
elif clip_info.mpy_clip.audio is not None:
audio_clip = clip_info.mpy_clip.audio
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
if clip_info.subclip is not None:
duration = clip_info.subclip[1] - clip_info.subclip[0]
audio_clip = audio_clip.subclip(clip_info.subclip[0],
clip_info.subclip[1])
else:
duration = clip_info.duration
duration = min(duration, audio_clip.duration)
audio_clip = audio_clip.set_duration(duration)
audio_clip = audio_clip.set_start(clip_info.start)
if clip_info.norm:
audio_clip = audio_clip.fx(afx.audio_normalize)
if clip_info.vol is not None:
if isinstance(clip_info.vol, (int, float)):
audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)
else:
audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,
clip_info.vol)
audio_clips.append(audio_clip)
crossfade_duration = track[i + 1].crossfade if i < len(track
) - 1 else 0
if crossfade_duration:
clip_info.duration += crossfade_duration
clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **
vars(clip_info))
if clip_info.fadein:
assert isinstance(clip_info.fadein, (int, float))
if track_name != 'vid':
clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(
clip_info.fadein)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,
clip_info.fadein)
elif clip_info.crossfade > 0:
video_clips.append(clip_info.mpy_clip.set_duration(
clip_info.crossfade).crossfadein(clip_info.crossfade).
set_start(clip_info.start))
clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.
crossfade)
clip_info.start += clip_info.crossfade
if clip_info.fadeout:
assert isinstance(clip_info.fadeout, (int, float))
if track_name != 'vid':
clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(
clip_info.fadeout)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,
clip_info.fadeout)
video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))
if len(video_clips) == 0:
video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).
set_duration(2))
final_clip = CompositeVideoClip(video_clips, size=resolution)
for _, track in datastruct.audio_tracks.items():
clips = []
for clip_info in track.clips:
if clip_info.loop:
clip = AudioFileClip(clip_info.file, buffersize=400000)
else:
clip = clip_info.mpy_clip
if clip_info.subclip is not None:
clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])
duration = clip_info.duration
if duration is not None:
if clip_info.loop:
clip = clip.fx(afx.audio_loop, duration=duration)
else:
duration = min(duration, clip.duration)
if clip_info.subclip:
duration = min(duration, clip_info.subclip[1] -
clip_info.subclip[0])
clip = clip.set_duration(duration)
if clip_info.start is not None:
clip = clip.set_start(clip_info.start)
if len(clip_info.vol_keypoints) > 0:
clip = _adjust_mpy_audio_clip_volume(clip, clip_info.
vol_keypoints)
clips.append(clip)
if len(clips) > 0:
clip = CompositeAudioClip(clips)
audio_clips.append(clip)
if final_clip.audio:
audio_clips.append(final_clip.audio)
if len(audio_clips) > 0:
final_audio_clip = CompositeAudioClip(audio_clips)
final_clip = final_clip.set_audio(final_audio_clip)
os.makedirs('tmp/out', exist_ok=True)
if audio_only:
final_audio_clip.fps = 44100
final_audio_clip.write_audiofile('%s.mp3' % out_filename)
open_with('%s.mp3' % out_filename, program_id=0)
else:
final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=
'%s.mp3' % out_filename, remove_temp=False, codec='libx264',
threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])
subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',
f'{out_filename}.mp4'], close_fds=True)
def _adjust_mpy_audio_clip_volume(clip, vol_keypoints):
xp = []
fp = []
print('vol_keypoints:', vol_keypoints)
for p, vol in vol_keypoints:
if isinstance(vol, (int, float)):
xp.append(p)
fp.append(vol)
else:
            raise Exception('unsupported bgm parameter type: %s' % type(vol))
def volume_adjust(gf, t):
factor = np.interp(t, xp, fp)
factor = np.vstack([factor, factor]).T
return factor * gf(t)
return clip.fl(volume_adjust)
def _convert_to_readable_time(seconds):
seconds = int(seconds)
seconds = seconds % (24 * 3600)
hour = seconds // 3600
seconds %= 3600
minutes = seconds // 60
seconds %= 60
if hour > 0:
return '%d:%02d:%02d' % (hour, minutes, seconds)
else:
return '%02d:%02d' % (minutes, seconds)
def _write_timestamp(t, section_name):
os.makedirs(os.path.dirname(out_filename), exist_ok=True)
if not hasattr(_write_timestamp, 'f'):
_write_timestamp.f = open('%s.txt' % out_filename, 'w', encoding=
'utf-8')
_write_timestamp.f.write('%s (%s)\n' % (section_name,
_convert_to_readable_time(t)))
_write_timestamp.f.flush()
@core.api
def include(file):
with open(file, 'r', encoding='utf-8') as f:
s = f.read()
cwd = os.getcwd()
os.chdir(os.path.dirname(os.path.abspath(file)))
_parse_text(s)
os.chdir(cwd)
def _remove_unused_recordings(s):
used_recordings = set()
unused_recordings = []
apis = {'record': lambda f, **kargs: used_recordings.add(f)}
_parse_text(s, apis=apis)
files = [f for f in glob.glob('record/*') if os.path.isfile(f)]
files = [f.replace('\\', '/') for f in files]
for f in files:
if f not in used_recordings:
unused_recordings.append(f)
print2('Used : %d' % len(used_recordings), color='green')
print2('Unused : %d' % len(unused_recordings), color='red')
assert len(used_recordings) + len(unused_recordings) == len(files)
print('Press y to clean up: ', end='', flush=True)
if getch() == 'y':
for f in unused_recordings:
try:
os.remove(f)
except:
print('WARNING: failed to remove: %s' % f)
def _parse_text(text, apis=core.apis, **kwargs):
def find_next(text, needle, p):
pos = text.find(needle, p)
if pos < 0:
pos = len(text)
return pos
text = re.sub('<!--[\\d\\D]*?-->', '', text)
p = 0
while p < len(text):
if text[p:p + 2] == '{{':
end = find_next(text, '}}', p)
python_code = text[p + 2:end].strip()
p = end + 2
if ignore_undefined:
try:
exec(python_code, apis)
except NameError:
pass
else:
exec(python_code, apis)
continue
if text[p:p + 1] == '#':
end = find_next(text, '\n', p)
line = text[p:end].strip()
_write_timestamp(coreapi.pos_dict['a'], line)
p = end + 1
continue
match = re.match('---((?:[0-9]*[.])?[0-9]+)?\n', text[p:])
if match is not None:
if match.group(1) is not None:
coreapi.audio_gap(float(match.group(1)))
else:
coreapi.audio_gap(0.2)
p += match.end(0) + 1
continue
end = find_next(text, '\n', p)
line = text[p:end].strip()
p = end + 1
if line != '' and 'parse_line' in apis:
apis['parse_line'](line)
core.on_api_func(None)
def _show_stats(s):
TIME_PER_CHAR = 0.1334154351395731
total = 0
def parse_line(line):
nonlocal total
total += len(line)
_parse_text(s, apis={'parse_line': parse_line}, ignore_undefined=True)
total_secs = TIME_PER_CHAR * total
print('Estimated Time: %s' % format_time(total_secs))
input()
def load_config():
import yaml
CONFIG_FILE = 'config.yaml'
DEFAULT_CONFIG = {'fps': 30}
if os.path.exists(CONFIG_FILE):
with open(CONFIG_FILE, 'r') as f:
config = yaml.load(f.read(), Loader=yaml.FullLoader)
else:
with open(CONFIG_FILE, 'w', newline='\n') as f:
yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)
config = DEFAULT_CONFIG
coreapi.fps(config['fps'])
if __name__ == '__main__':
out_filename = 'tmp/out/' + get_time_str()
parser = argparse.ArgumentParser()
parser.add_argument('--stdin', default=False, action='store_true')
parser.add_argument('--proj_dir', type=str, default=None)
parser.add_argument('-i', '--input', type=str, default=None)
parser.add_argument('-a', '--audio_only', action='store_true', default=
False)
parser.add_argument('--remove_unused_recordings', action='store_true',
default=False)
parser.add_argument('--show_stats', action='store_true', default=False)
parser.add_argument('--preview', action='store_true', default=False)
args = parser.parse_args()
if args.proj_dir is not None:
os.chdir(args.proj_dir)
elif args.input:
os.chdir(os.path.dirname(args.input))
print('Project dir: %s' % os.getcwd())
if os.path.exists('api.py'):
sys.path.append(os.getcwd())
mymodule = importlib.import_module('api')
global_functions = inspect.getmembers(mymodule, inspect.isfunction)
core.apis.update({k: v for k, v in global_functions})
if args.audio_only:
coreapi.audio_only()
if args.stdin:
s = sys.stdin.read()
elif args.input:
with open(args.input, 'r', encoding='utf-8') as f:
s = f.read()
else:
raise Exception('Either --stdin or --input should be specified.')
load_config()
if args.preview:
coreapi.preview()
if args.remove_unused_recordings:
ignore_undefined = True
_remove_unused_recordings(s)
elif args.show_stats:
ignore_undefined = True
_show_stats(s)
else:
_parse_text(s, apis=core.apis)
_export_video(resolution=(1920, 1080), audio_only=args.audio_only)
<|reserved_special_token_1|>
import argparse
import glob
import importlib
import inspect
import math
import os
import re
import subprocess
import sys
import moviepy.audio.fx.all as afx
import moviepy.video.fx.all as vfx
import numpy as np
from _appmanager import get_executable
from _shutil import format_time, get_time_str, getch, print2
from moviepy.config import change_settings
from moviepy.editor import *
from open_with.open_with import open_with
import codeapi
import core
import coreapi
import datastruct
SCRIPT_ROOT = os.path.dirname(os.path.abspath(__file__))
ignore_undefined = False
if 1:
change_settings({"FFMPEG_BINARY": get_executable("ffmpeg")})
# def _get_markers(file):
# marker_file = file + ".marker.txt"
# if os.path.exists(marker_file):
# with open(marker_file, "r") as f:
# s = f.read()
# return [float(x) for x in s.split()]
# else:
# return None
# def _load_and_expand_img(f):
# fg = Image.open(f).convert("RGBA")
# bg = Image.new("RGB", (1920, 1080))
# bg.paste(fg, ((bg.width - fg.width) // 2, (bg.height - fg.height) // 2), fg)
# return np.array(bg)
def _update_mpy_clip(
clip, subclip, speed, frame, norm, loop, duration, pos, scale, vol, **kwargs,
):
assert duration is not None
# video clip operations / fx
if subclip is not None:
if isinstance(subclip, (int, float)):
clip = clip.subclip(subclip).set_duration(duration)
else:
subclip_duration = subclip[1] - subclip[0]
if duration > subclip_duration:
c1 = clip.subclip(subclip[0], subclip[1])
c2 = clip.to_ImageClip(subclip[1]).set_duration(
duration - subclip_duration
)
clip = concatenate_videoclips([c1, c2])
# HACK: workaround for a bug: 'CompositeAudioClip' object has no attribute 'fps'
if clip.audio is not None:
clip = clip.set_audio(clip.audio.set_fps(44100))
else:
clip = clip.subclip(subclip[0], subclip[1]).set_duration(duration)
if speed is not None:
clip = clip.fx(
# pylint: disable=maybe-no-member
vfx.speedx,
speed,
)
if frame is not None:
clip = clip.to_ImageClip(frame).set_duration(duration)
# Loop or change duration
if loop:
clip = clip.fx(
# pylint: disable=maybe-no-member
vfx.loop
)
if subclip is None:
clip = clip.set_duration(duration)
if pos is not None:
# (x, y) marks the center location of the of the clip instead of the top
# left corner.
if pos == "center":
clip = clip.set_position(("center", "center"))
elif isinstance(pos, (list, tuple)):
pos = list(pos)
half_size = [x // 2 for x in clip.size]
for i in range(2):
if isinstance(pos[i], (int, float)):
pos[i] = pos[i] - half_size[i]
pos[i] = int(coreapi.global_scale * pos[i])
clip = clip.set_position(pos)
else:
clip = clip.set_position(pos)
if scale[0] != 1.0 or scale[1] != 1.0:
clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))
return clip
def _update_clip_duration(track):
def is_connected(prev_clip, cur_clip):
return math.isclose(
prev_clip.start + prev_clip.duration, cur_clip.start, rel_tol=1e-3,
)
prev_clip_info = None
for clip_info in track:
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
prev_clip_info.duration = clip_info.start - prev_clip_info.start
prev_clip_info.auto_extend = False
assert prev_clip_info.duration > 0
# Apply fadeout to previous clip if it's not connected with
# current clip.
if prev_clip_info.crossfade > 0 and not is_connected(
prev_clip_info, clip_info
):
prev_clip_info.fadeout = prev_clip_info.crossfade
prev_clip_info = clip_info
# Update last clip duration
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
duration = prev_clip_info.duration
# Extend the last video clip to match the voice track
if "re" in coreapi.pos_dict:
duration = max(duration, coreapi.pos_dict["re"] - clip_info.start)
prev_clip_info.duration = duration
prev_clip_info.auto_extend = False
if prev_clip_info.crossfade > 0:
prev_clip_info.fadeout = prev_clip_info.crossfade
def _export_video(*, resolution, audio_only):
resolution = [int(x * coreapi.global_scale) for x in resolution]
audio_clips = []
# Update clip duration for each track
for track in datastruct.video_tracks.values():
_update_clip_duration(track)
# TODO: post-process video track clips
# Update MoviePy clip object in each track.
video_clips = []
for track_name, track in datastruct.video_tracks.items():
for i, clip_info in enumerate(track):
assert clip_info.mpy_clip is not None
assert clip_info.duration is not None
# Unlink audio clip from video clip (adjust audio duration)
if clip_info.no_audio:
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
elif clip_info.mpy_clip.audio is not None:
audio_clip = clip_info.mpy_clip.audio
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
# Audio timing
# TODO: audio subclip
if clip_info.subclip is not None:
duration = clip_info.subclip[1] - clip_info.subclip[0]
audio_clip = audio_clip.subclip(
clip_info.subclip[0], clip_info.subclip[1]
)
else:
duration = clip_info.duration
duration = min(duration, audio_clip.duration)
audio_clip = audio_clip.set_duration(duration)
audio_clip = audio_clip.set_start(clip_info.start)
# Adjust volume
if clip_info.norm:
audio_clip = audio_clip.fx(
# pylint: disable=maybe-no-member
afx.audio_normalize
)
if clip_info.vol is not None:
if isinstance(clip_info.vol, (int, float)):
audio_clip = audio_clip.fx(
# pylint: disable=maybe-no-member
afx.volumex,
clip_info.vol,
)
else:
audio_clip = _adjust_mpy_audio_clip_volume(
audio_clip, clip_info.vol
)
audio_clips.append(audio_clip)
# If the next clip has crossfade enabled
crossfade_duration = track[i + 1].crossfade if (i < len(track) - 1) else 0
if crossfade_duration:
# clip_info.fadeout = crossfade_duration # Fadeout current clip
clip_info.duration += crossfade_duration
clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **vars(clip_info))
# Deal with video fade in / out / crossfade
if clip_info.fadein:
assert isinstance(clip_info.fadein, (int, float))
# TODO: crossfadein and crossfadeout is very slow in moviepy
if track_name != "vid":
clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(
clip_info.fadein
)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(
# pylint: disable=maybe-no-member
vfx.fadein,
clip_info.fadein,
)
elif (
clip_info.crossfade > 0
): # crossfade and fadein should not happen at the same time
video_clips.append(
clip_info.mpy_clip.set_duration(clip_info.crossfade)
.crossfadein(clip_info.crossfade)
.set_start(clip_info.start)
)
clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.crossfade)
clip_info.start += clip_info.crossfade
if clip_info.fadeout:
assert isinstance(clip_info.fadeout, (int, float))
if track_name != "vid":
# pylint: disable=maybe-no-member
clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(
clip_info.fadeout
)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(
# pylint: disable=maybe-no-member
vfx.fadeout,
clip_info.fadeout,
)
video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))
if len(video_clips) == 0:
video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).set_duration(2))
# raise Exception("no video clips??")
final_clip = CompositeVideoClip(video_clips, size=resolution)
# Resize here is too late, does not speed up the video encoding at all.
# final_clip = final_clip.resize(width=480)
# Deal with audio clips
for _, track in datastruct.audio_tracks.items():
clips = []
for clip_info in track.clips:
if clip_info.loop:
# HACK: reload the clip.
#
# still don't know why using loaded mpy_clip directly will cause
# "IndexError: index -200001 is out of bounds for axis 0 with
# size 0"...
clip = AudioFileClip(clip_info.file, buffersize=400000)
else:
clip = clip_info.mpy_clip
if clip_info.subclip is not None:
clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])
duration = clip_info.duration
if duration is not None:
if clip_info.loop:
# pylint: disable=maybe-no-member
clip = clip.fx(afx.audio_loop, duration=duration)
else:
duration = min(duration, clip.duration)
if clip_info.subclip:
duration = min(
duration, clip_info.subclip[1] - clip_info.subclip[0]
)
clip = clip.set_duration(duration)
if clip_info.start is not None:
clip = clip.set_start(clip_info.start)
# Adjust volume by keypoints
if len(clip_info.vol_keypoints) > 0:
clip = _adjust_mpy_audio_clip_volume(clip, clip_info.vol_keypoints)
clips.append(clip)
if len(clips) > 0:
clip = CompositeAudioClip(clips)
audio_clips.append(clip)
if final_clip.audio:
audio_clips.append(final_clip.audio)
if len(audio_clips) > 0:
final_audio_clip = CompositeAudioClip(audio_clips)
# XXX: Workaround for exception: 'CompositeAudioClip' object has no attribute 'fps'.
# See: https://github.com/Zulko/moviepy/issues/863
# final_audio_clip.fps = 44100
final_clip = final_clip.set_audio(final_audio_clip)
# final_clip.show(10.5, interactive=True)
os.makedirs("tmp/out", exist_ok=True)
if audio_only:
final_audio_clip.fps = 44100
final_audio_clip.write_audiofile("%s.mp3" % out_filename)
open_with("%s.mp3" % out_filename, program_id=0)
else:
final_clip.write_videofile(
"%s.mp4" % out_filename,
temp_audiofile="%s.mp3" % out_filename,
remove_temp=False,
codec="libx264",
threads=8,
fps=coreapi.FPS,
ffmpeg_params=["-crf", "19"],
)
subprocess.Popen(
["mpv", "--force-window", "--geometry=1920x1080", f"{out_filename}.mp4"],
close_fds=True,
)
def _adjust_mpy_audio_clip_volume(clip, vol_keypoints):
xp = []
fp = []
print("vol_keypoints:", vol_keypoints)
for (p, vol) in vol_keypoints:
if isinstance(vol, (int, float)):
xp.append(p)
fp.append(vol)
else:
raise Exception("unsupported bgm parameter type:" % type(vol))
def volume_adjust(gf, t):
factor = np.interp(t, xp, fp)
factor = np.vstack([factor, factor]).T
return factor * gf(t)
return clip.fl(volume_adjust)
# def _export_srt():
# with open("out.srt", "w", encoding="utf-8") as f:
# f.write("\n".join(_srt_lines))
def _convert_to_readable_time(seconds):
seconds = int(seconds)
seconds = seconds % (24 * 3600)
hour = seconds // 3600
seconds %= 3600
minutes = seconds // 60
seconds %= 60
if hour > 0:
return "%d:%02d:%02d" % (hour, minutes, seconds)
else:
return "%02d:%02d" % (minutes, seconds)
def _write_timestamp(t, section_name):
os.makedirs(os.path.dirname(out_filename), exist_ok=True)
if not hasattr(_write_timestamp, "f"):
_write_timestamp.f = open("%s.txt" % out_filename, "w", encoding="utf-8")
_write_timestamp.f.write("%s (%s)\n" % (section_name, _convert_to_readable_time(t)))
_write_timestamp.f.flush()
@core.api
def include(file):
with open(file, "r", encoding="utf-8") as f:
s = f.read()
cwd = os.getcwd()
os.chdir(os.path.dirname(os.path.abspath(file)))
_parse_text(s)
os.chdir(cwd)
def _remove_unused_recordings(s):
used_recordings = set()
unused_recordings = []
apis = {"record": (lambda f, **kargs: used_recordings.add(f))}
_parse_text(s, apis=apis)
files = [f for f in glob.glob("record/*") if os.path.isfile(f)]
files = [f.replace("\\", "/") for f in files]
for f in files:
if f not in used_recordings:
unused_recordings.append(f)
print2("Used : %d" % len(used_recordings), color="green")
print2("Unused : %d" % len(unused_recordings), color="red")
assert len(used_recordings) + len(unused_recordings) == len(files)
print("Press y to clean up: ", end="", flush=True)
if getch() == "y":
for f in unused_recordings:
try:
os.remove(f)
except:
print("WARNING: failed to remove: %s" % f)
def _parse_text(text, apis=core.apis, **kwargs):
def find_next(text, needle, p):
pos = text.find(needle, p)
if pos < 0:
pos = len(text)
return pos
# Remove all comments
text = re.sub(r"<!--[\d\D]*?-->", "", text)
p = 0 # Current position
while p < len(text):
if text[p : p + 2] == "{{":
end = find_next(text, "}}", p)
python_code = text[p + 2 : end].strip()
p = end + 2
if ignore_undefined:
try:
exec(python_code, apis)
except NameError: # API is not defined
pass # simply ignore
else:
exec(python_code, apis)
continue
if text[p : p + 1] == "#":
end = find_next(text, "\n", p)
line = text[p:end].strip()
_write_timestamp(coreapi.pos_dict["a"], line)
p = end + 1
continue
match = re.match("---((?:[0-9]*[.])?[0-9]+)?\n", text[p:])
if match is not None:
if match.group(1) is not None:
coreapi.audio_gap(float(match.group(1)))
else:
coreapi.audio_gap(0.2)
p += match.end(0) + 1
continue
# Parse regular text
end = find_next(text, "\n", p)
line = text[p:end].strip()
p = end + 1
if line != "" and "parse_line" in apis:
apis["parse_line"](line)
# Call it at the end
core.on_api_func(None)
def _show_stats(s):
TIME_PER_CHAR = 0.1334154351395731
total = 0
def parse_line(line):
nonlocal total
total += len(line)
_parse_text(s, apis={"parse_line": parse_line}, ignore_undefined=True)
total_secs = TIME_PER_CHAR * total
print("Estimated Time: %s" % format_time(total_secs))
input()
def load_config():
import yaml
CONFIG_FILE = "config.yaml"
DEFAULT_CONFIG = {"fps": 30}
if os.path.exists(CONFIG_FILE):
with open(CONFIG_FILE, "r") as f:
config = yaml.load(f.read(), Loader=yaml.FullLoader)
else:
with open(CONFIG_FILE, "w", newline="\n") as f:
yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)
config = DEFAULT_CONFIG
coreapi.fps(config["fps"])
if __name__ == "__main__":
out_filename = "tmp/out/" + get_time_str()
parser = argparse.ArgumentParser()
parser.add_argument("--stdin", default=False, action="store_true")
parser.add_argument("--proj_dir", type=str, default=None)
parser.add_argument("-i", "--input", type=str, default=None)
parser.add_argument("-a", "--audio_only", action="store_true", default=False)
parser.add_argument(
"--remove_unused_recordings", action="store_true", default=False
)
parser.add_argument("--show_stats", action="store_true", default=False)
parser.add_argument("--preview", action="store_true", default=False)
args = parser.parse_args()
if args.proj_dir is not None:
os.chdir(args.proj_dir)
    elif args.input:
        # Make the path absolute so the file can still be opened after chdir()
        args.input = os.path.abspath(args.input)
        os.chdir(os.path.dirname(args.input))
print("Project dir: %s" % os.getcwd())
# Load custom APIs (api.py) if exists
if os.path.exists("api.py"):
sys.path.append(os.getcwd())
mymodule = importlib.import_module("api")
global_functions = inspect.getmembers(mymodule, inspect.isfunction)
core.apis.update({k: v for k, v in global_functions})
# HACK
if args.audio_only:
coreapi.audio_only()
# Read text
if args.stdin:
s = sys.stdin.read()
elif args.input:
with open(args.input, "r", encoding="utf-8") as f:
s = f.read()
else:
raise Exception("Either --stdin or --input should be specified.")
load_config()
if args.preview:
coreapi.preview()
if args.remove_unused_recordings:
ignore_undefined = True
_remove_unused_recordings(s)
elif args.show_stats:
ignore_undefined = True
_show_stats(s)
else:
_parse_text(s, apis=core.apis)
_export_video(resolution=(1920, 1080), audio_only=args.audio_only)
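# Example invocation (hypothetical script and file names; only the flags are
# those defined by the argparse setup above):
#   python export.py -i projects/demo/index.md --preview
#   python export.py --stdin --audio_only < script.md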
|
flexible
|
{
"blob_id": "9e21a39358d97633b49ad83805990c29c19a80ed",
"index": 8599,
"step-1": "<mask token>\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if 
isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\n<mask token>\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\n<mask token>\n\n\[email protected]\ndef include(file):\n with open(file, 'r', encoding='utf-8') as 
f:\n s = f.read()\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\n<mask token>\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if 
isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\ndef _adjust_mpy_audio_clip_volume(clip, vol_keypoints):\n xp = []\n fp = []\n print('vol_keypoints:', vol_keypoints)\n for p, vol in vol_keypoints:\n if isinstance(vol, (int, float)):\n xp.append(p)\n fp.append(vol)\n else:\n raise Exception('unsupported bgm parameter type:' % type(vol))\n\n def volume_adjust(gf, t):\n factor = np.interp(t, xp, fp)\n factor = np.vstack([factor, factor]).T\n return factor * gf(t)\n return 
clip.fl(volume_adjust)\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\n<mask token>\n\n\[email protected]\ndef include(file):\n with open(file, 'r', encoding='utf-8') as f:\n s = f.read()\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\n<mask token>\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if 
isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\ndef _adjust_mpy_audio_clip_volume(clip, vol_keypoints):\n xp = []\n fp = []\n print('vol_keypoints:', vol_keypoints)\n for p, vol in vol_keypoints:\n if isinstance(vol, (int, float)):\n xp.append(p)\n fp.append(vol)\n else:\n raise Exception('unsupported bgm parameter type:' % type(vol))\n\n def volume_adjust(gf, t):\n factor = np.interp(t, xp, fp)\n factor = np.vstack([factor, factor]).T\n return factor * gf(t)\n return 
clip.fl(volume_adjust)\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\ndef _write_timestamp(t, section_name):\n os.makedirs(os.path.dirname(out_filename), exist_ok=True)\n if not hasattr(_write_timestamp, 'f'):\n _write_timestamp.f = open('%s.txt' % out_filename, 'w', encoding=\n 'utf-8')\n _write_timestamp.f.write('%s (%s)\\n' % (section_name,\n _convert_to_readable_time(t)))\n _write_timestamp.f.flush()\n\n\[email protected]\ndef include(file):\n with open(file, 'r', encoding='utf-8') as f:\n s = f.read()\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\n<mask token>\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\n<mask token>\n",
"step-4": "<mask token>\nif 1:\n change_settings({'FFMPEG_BINARY': get_executable('ffmpeg')})\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = 
audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\ndef _adjust_mpy_audio_clip_volume(clip, vol_keypoints):\n xp = []\n fp = []\n print('vol_keypoints:', vol_keypoints)\n for p, vol in vol_keypoints:\n if isinstance(vol, (int, float)):\n xp.append(p)\n fp.append(vol)\n else:\n raise Exception('unsupported bgm parameter type:' % type(vol))\n\n def volume_adjust(gf, t):\n factor = np.interp(t, xp, fp)\n 
factor = np.vstack([factor, factor]).T\n return factor * gf(t)\n return clip.fl(volume_adjust)\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\ndef _write_timestamp(t, section_name):\n os.makedirs(os.path.dirname(out_filename), exist_ok=True)\n if not hasattr(_write_timestamp, 'f'):\n _write_timestamp.f = open('%s.txt' % out_filename, 'w', encoding=\n 'utf-8')\n _write_timestamp.f.write('%s (%s)\\n' % (section_name,\n _convert_to_readable_time(t)))\n _write_timestamp.f.flush()\n\n\[email protected]\ndef include(file):\n with open(file, 'r', encoding='utf-8') as f:\n s = f.read()\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\ndef _show_stats(s):\n TIME_PER_CHAR = 0.1334154351395731\n total = 0\n\n def parse_line(line):\n nonlocal total\n total += len(line)\n _parse_text(s, apis={'parse_line': parse_line}, ignore_undefined=True)\n total_secs = TIME_PER_CHAR * total\n print('Estimated Time: %s' % format_time(total_secs))\n input()\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\nif __name__ == '__main__':\n out_filename = 'tmp/out/' + get_time_str()\n parser = 
argparse.ArgumentParser()\n parser.add_argument('--stdin', default=False, action='store_true')\n parser.add_argument('--proj_dir', type=str, default=None)\n parser.add_argument('-i', '--input', type=str, default=None)\n parser.add_argument('-a', '--audio_only', action='store_true', default=\n False)\n parser.add_argument('--remove_unused_recordings', action='store_true',\n default=False)\n parser.add_argument('--show_stats', action='store_true', default=False)\n parser.add_argument('--preview', action='store_true', default=False)\n args = parser.parse_args()\n if args.proj_dir is not None:\n os.chdir(args.proj_dir)\n elif args.input:\n os.chdir(os.path.dirname(args.input))\n print('Project dir: %s' % os.getcwd())\n if os.path.exists('api.py'):\n sys.path.append(os.getcwd())\n mymodule = importlib.import_module('api')\n global_functions = inspect.getmembers(mymodule, inspect.isfunction)\n core.apis.update({k: v for k, v in global_functions})\n if args.audio_only:\n coreapi.audio_only()\n if args.stdin:\n s = sys.stdin.read()\n elif args.input:\n with open(args.input, 'r', encoding='utf-8') as f:\n s = f.read()\n else:\n raise Exception('Either --stdin or --input should be specified.')\n load_config()\n if args.preview:\n coreapi.preview()\n if args.remove_unused_recordings:\n ignore_undefined = True\n _remove_unused_recordings(s)\n elif args.show_stats:\n ignore_undefined = True\n _show_stats(s)\n else:\n _parse_text(s, apis=core.apis)\n _export_video(resolution=(1920, 1080), audio_only=args.audio_only)\n",
"step-5": "import argparse\nimport glob\nimport importlib\nimport inspect\nimport math\nimport os\nimport re\nimport subprocess\nimport sys\n\nimport moviepy.audio.fx.all as afx\nimport moviepy.video.fx.all as vfx\nimport numpy as np\nfrom _appmanager import get_executable\nfrom _shutil import format_time, get_time_str, getch, print2\nfrom moviepy.config import change_settings\nfrom moviepy.editor import *\nfrom open_with.open_with import open_with\n\nimport codeapi\nimport core\nimport coreapi\nimport datastruct\n\nSCRIPT_ROOT = os.path.dirname(os.path.abspath(__file__))\n\nignore_undefined = False\n\nif 1:\n change_settings({\"FFMPEG_BINARY\": get_executable(\"ffmpeg\")})\n\n\n# def _get_markers(file):\n# marker_file = file + \".marker.txt\"\n# if os.path.exists(marker_file):\n# with open(marker_file, \"r\") as f:\n# s = f.read()\n# return [float(x) for x in s.split()]\n# else:\n# return None\n\n\n# def _load_and_expand_img(f):\n# fg = Image.open(f).convert(\"RGBA\")\n# bg = Image.new(\"RGB\", (1920, 1080))\n# bg.paste(fg, ((bg.width - fg.width) // 2, (bg.height - fg.height) // 2), fg)\n# return np.array(bg)\n\n\ndef _update_mpy_clip(\n clip, subclip, speed, frame, norm, loop, duration, pos, scale, vol, **kwargs,\n):\n assert duration is not None\n\n # video clip operations / fx\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(\n duration - subclip_duration\n )\n clip = concatenate_videoclips([c1, c2])\n\n # HACK: workaround for a bug: 'CompositeAudioClip' object has no attribute 'fps'\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(duration)\n\n if speed is not None:\n clip = clip.fx(\n # pylint: disable=maybe-no-member\n vfx.speedx,\n speed,\n )\n\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n\n # Loop or change duration\n if loop:\n clip = clip.fx(\n # pylint: disable=maybe-no-member\n vfx.loop\n )\n\n if subclip is None:\n clip = clip.set_duration(duration)\n\n if pos is not None:\n # (x, y) marks the center location of the of the clip instead of the top\n # left corner.\n if pos == \"center\":\n clip = clip.set_position((\"center\", \"center\"))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [x // 2 for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n\n return clip\n\n\ndef _update_clip_duration(track):\n def is_connected(prev_clip, cur_clip):\n return math.isclose(\n prev_clip.start + prev_clip.duration, cur_clip.start, rel_tol=1e-3,\n )\n\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = clip_info.start - prev_clip_info.start\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n\n # Apply fadeout to previous clip if it's not connected with\n # current clip.\n if prev_clip_info.crossfade > 0 and not is_connected(\n prev_clip_info, clip_info\n ):\n prev_clip_info.fadeout = 
prev_clip_info.crossfade\n\n prev_clip_info = clip_info\n\n # Update last clip duration\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n\n # Extend the last video clip to match the voice track\n if \"re\" in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict[\"re\"] - clip_info.start)\n\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n\n audio_clips = []\n\n # Update clip duration for each track\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n\n # TODO: post-process video track clips\n\n # Update MoviePy clip object in each track.\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n\n # Unlink audio clip from video clip (adjust audio duration)\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n\n # Audio timing\n # TODO: audio subclip\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(\n clip_info.subclip[0], clip_info.subclip[1]\n )\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n\n # Adjust volume\n if clip_info.norm:\n audio_clip = audio_clip.fx(\n # pylint: disable=maybe-no-member\n afx.audio_normalize\n )\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(\n # pylint: disable=maybe-no-member\n afx.volumex,\n clip_info.vol,\n )\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(\n audio_clip, clip_info.vol\n )\n\n audio_clips.append(audio_clip)\n\n # If the next clip has crossfade enabled\n crossfade_duration = track[i + 1].crossfade if (i < len(track) - 1) else 0\n if crossfade_duration:\n # clip_info.fadeout = crossfade_duration # Fadeout current clip\n clip_info.duration += crossfade_duration\n\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **vars(clip_info))\n\n # Deal with video fade in / out / crossfade\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n # TODO: crossfadein and crossfadeout is very slow in moviepy\n if track_name != \"vid\":\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein\n )\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(\n # pylint: disable=maybe-no-member\n vfx.fadein,\n clip_info.fadein,\n )\n\n elif (\n clip_info.crossfade > 0\n ): # crossfade and fadein should not happen at the same time\n video_clips.append(\n clip_info.mpy_clip.set_duration(clip_info.crossfade)\n .crossfadein(clip_info.crossfade)\n .set_start(clip_info.start)\n )\n\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.crossfade)\n clip_info.start += clip_info.crossfade\n\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != \"vid\":\n # pylint: disable=maybe-no-member\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout\n )\n 
"step-ids": [
8,
9,
10,
12,
15
]
}
|
[
8,
9,
10,
12,
15
] |
__author__ = 'tomer'
import sqlite3
from random import randint
import test_data
def init_database(conn):
c = conn.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS catalogs
(id INTEGER PRIMARY KEY AUTOINCREMENT, catalog_name TEXT)''')
c.execute('''CREATE TABLE IF NOT EXISTS products
(id INTEGER PRIMARY KEY AUTOINCREMENT, sku_id INTEGER, catalog_id INTEGER, product_name TEXT, price FLOAT, description TEXT)''')
c.execute('''CREATE TABLE IF NOT EXISTS users
(id INTEGER PRIMARY KEY AUTOINCREMENT, user_name TEXT)''')
c.execute('''CREATE TABLE IF NOT EXISTS products_bought
(id INTEGER PRIMARY KEY AUTOINCREMENT,user_id INTEGER,product_id INTEGER)''')
c.execute('''CREATE TABLE IF NOT EXISTS product_context
(id INTEGER PRIMARY KEY AUTOINCREMENT,recommendation_id INTEGER, product_id INTEGER, device TEXT, os TEXT, time_of_day TEXT, day_of_week TEXT, latitude float, longitude float,num_items_in_cart INTEGER, purchases_in_last_month INTEGER)''')
c.execute('''CREATE TABLE IF NOT EXISTS recommendations
(id INTEGER PRIMARY KEY AUTOINCREMENT,user_id INTEGER, product_id INTEGER, interacted BOOLEAN)''')
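# Illustrative sketch (not part of the original file): standing up the schema against
# the same SQLite file that get_connection() below opens. Safe to run repeatedly,
# since every CREATE TABLE uses IF NOT EXISTS.
#   conn = sqlite3.connect('recommendation_engine.db')
#   init_database(conn)
#   conn.close()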
def load_fake_data(conn):
c = conn.cursor()
c.execute('''DELETE FROM catalogs''')
c.execute('''DELETE FROM products''')
c.execute('''DELETE FROM users''')
c.execute('''DELETE FROM products_bought''')
c.execute('''DELETE FROM product_context''')
c.execute('''DELETE FROM recommendations''')
catalogs = []
c.execute('''INSERT INTO catalogs (catalog_name) VALUES (?)''',('BestBuy',))
catalogs.append(c.lastrowid)
c.execute('''INSERT INTO catalogs (catalog_name) VALUES (?)''',('RiteAid',))
catalogs.append(c.lastrowid)
ppl = []
c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Tomer',))
ppl.append(c.lastrowid)
c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Alex',))
ppl.append(c.lastrowid)
c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Matt',))
ppl.append(c.lastrowid)
c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Rachael',))
ppl.append(c.lastrowid)
c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Sam',))
ppl.append(c.lastrowid)
c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Joey',))
ppl.append(c.lastrowid)
products = []
# Load fake products
for i in range(1,20):
c.execute('''INSERT INTO products (id,sku_id,catalog_id, product_name, price,description) VALUES (NULL,?,?,?,?,?)''',(randint(1,2000),catalogs[randint(0,len(catalogs)-1)],'Movie' + str(i),randint(1,2000),'Title' + str(i)))
products.append(c.lastrowid)
# Load fake transactions
for i in range(1,50):
c.execute('''INSERT INTO products_bought (id,user_id, product_id) VALUES (NULL,?,?)''',(ppl[randint(0,len(ppl)-1)],products[randint(0,len(products)-1)]))
values = (c.lastrowid,device[randint(0,len(device)-1)],oses[randint(0,len(oses)-1)], times[randint(0,len(times)-1)], days[randint(0,len(days)-1)], lats[randint(0,len(lats)-1)], lons[randint(0,len(lons)-1)],randint(0,5),randint(0,30))
c.execute('''INSERT INTO product_context (id,recommendation_id , device , os , time_of_day , day_of_week , latitude , longitude ,num_items_in_cart , purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?)''',values)
# Load fake recommendations
for i in range(1,1000):
product_id = products[randint(0, len(products) - 1)]
c.execute('''INSERT INTO recommendations (id,user_id, product_id, interacted) VALUES (NULL,?,?,'true')''',(ppl[randint(0,len(ppl)-1)],product_id))
values = (c.lastrowid,product_id,device[randint(0,len(device)-1)],oses[randint(0,len(oses)-1)], times[randint(0,len(times)-1)], days[randint(0,len(days)-1)], lats[randint(0,len(lats)-1)], lons[randint(0,len(lons)-1)],randint(0,3),randint(0,3))
c.execute('''INSERT INTO product_context (id,recommendation_id , product_id , device , os , time_of_day , day_of_week , latitude , longitude ,num_items_in_cart , purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)''',values)
conn.commit()
oses = ['IOS', 'Android']#, 'Windows10', 'macOS']
device = ['mobile']#, 'computer']
'''
times = ['10:33 AM',
'2:38 PM',
'3:01 AM',
'12:31 AM',
'2:56 PM',
'8:01 AM',
'5:00 PM',
'9:38 PM',
'3:01 AM']
'''
times = ['morning', 'afternoon', 'night']
days = ['M']#['M', 'T', 'W', 'R', 'F', 'S', 'Su']
'''
lats = ['-149.8935557',
'-149.9054948',
'-149.7522',
'-149.8643361',
'-149.8379726',
'-149.9092788',
'-149.7364877',
'-149.8211',
'-149.8445832',
'-149.9728678']
'''
lats = ['north']#, 'south']
'''
lons = ['61.21759217',
'61.19533942',
'61.2297',
'61.19525062',
'61.13751355',
'61.13994658',
'61.19533265',
'61.2156',
'61.13806145',
'61.176693']
'''
lons = ['east']#, 'west']
def get_users(conn):
c = conn.cursor()
c.execute('''select * from users''')
return c.fetchall()
def get_catalogs(conn):
c = conn.cursor()
c.execute('''select * from catalogs''')
return c.fetchall()
def get_products(conn, catalog_id):
c = conn.cursor()
c.execute('''select * from products where catalog_id = ?''',(catalog_id,))
return c.fetchall()
def get_product_by_id(conn, catalog_id, product_id):
c = conn.cursor()
c.execute('''SELECT * FROM products WHERE catalog_id = ? AND id = ?''',(catalog_id,product_id))
return c.fetchall()
def get_products_bought(conn, catalog_id):
    c = conn.cursor()
    # The original FROM clause also listed "catalogs cat" without any join condition,
    # which produced a cartesian product and duplicated every row once per catalog;
    # the unused table is dropped here.
    c.execute('''select pb.* from products_bought pb, products p where pb.product_id = p.id and p.catalog_id = ?''',(catalog_id,))
    return c.fetchall()
def get_all_data(conn):
c = conn.cursor()
c.execute('''select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id''')
    return c.fetchall()
def get_data_for_user(conn,userid):
c = conn.cursor()
c.execute('''select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and u.id = ?''',(userid,))
return c.fetchall()
def get_data_for_user_and_catalog(conn, userid, catalogid):
c = conn.cursor()
c.execute('''select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and u.id = ? and c.id = ?''',(userid,catalogid))
return c.fetchall()
def get_transactions_for_catalog(conn,catalogid):
c = conn.cursor()
c.execute('''select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and c.id = ?''',(catalogid,))
return c.fetchall()
def get_recommendations_by_user(conn,userId):
c = conn.cursor()
c.execute('''select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.user_id = ?''',(userId,))
return c.fetchall()
def get_recommendations_by_product(conn,productId):
c = conn.cursor()
c.execute('''select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.product_id = ?''',(productId,))
return c.fetchall()
def get_connection():
return sqlite3.connect('recommendation_engine.db')
def generate_context(product_id):
return [product_id, device[randint(0, len(device) - 1)], oses[randint(0, len(oses) - 1)],
times[randint(0, len(times) - 1)], days[randint(0, len(days) - 1)], lats[randint(0, len(lats) - 1)],
lons[randint(0, len(lons) - 1)], randint(0, 3), randint(0, 3)]
def add_recommendation(conn, product_ids,user_ids,contexts):
ids = []
c = conn.cursor()
for i in range(0,len(product_ids)):
product_id = product_ids[i]
user_id = user_ids[i]
context = contexts[i]
c.execute('''INSERT INTO recommendations (id,user_id, product_id, interacted) VALUES (NULL,?,?,'false')''',
(user_id, product_id))
context.insert(0,c.lastrowid)
ids.append(c.lastrowid)
c.execute( '''INSERT INTO product_context (id,recommendation_id , product_id , device , os , time_of_day , day_of_week , latitude , longitude ,num_items_in_cart , purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)''',
context)
conn.commit()
c.execute('select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.id in (%s)' %
','.join('?'*len(ids)), ids)
return c.fetchall()
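# Illustrative sketch (not part of the original file): logging a single recommendation
# of product 2 to user 1 with a randomly generated context, then reading it back.
#   rows = add_recommendation(conn, [2], [1], [generate_context(2)])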
def get_probability(conn, x, giveny):
    # Returns the empirical conditional probability P(giveny | x): the fraction of
    # recorded contexts matching x that also match giveny (0 if nothing matches x).
    c = conn.cursor()
query = '''select count(*) from product_context where '''
first = True
params = []
for key,val in x.items():
if not first:
query += ' and '
else:
first = False
query += str(key) + '=?'
params.append(str(val))
c.execute(query,params)
total = c.fetchone()[0]
for key,val in giveny.items():
query += ' and ' + str(key) + '=?'
params.append(str(val))
c.execute(query,params)
smaller = c.fetchone()[0]
if total == 0:
return 0
else:
return smaller/float(total)
def load_test_data(conn):
c = conn.cursor()
# Clear database
c.execute('''DELETE FROM catalogs''')
c.execute('''DELETE FROM products''')
c.execute('''DELETE FROM users''')
c.execute('''DELETE FROM products_bought''')
c.execute('''DELETE FROM product_context''')
c.execute('''DELETE FROM recommendations''')
# Initialize users
user_names = test_data.USER_NAMES
# Initialize movie names
product_names = test_data.PRODUCT_NAMES
# Initialize Prices
prices = test_data.POSSIBLE_PRICES
# Load test catalog
catalog_ids = []
c.execute('''INSERT INTO catalogs (catalog_name) VALUES (?)''', ('MovieDatabase',))
catalog_ids.append(c.lastrowid)
# Load test users
user_ids = []
for user in user_names:
c.execute('''INSERT INTO users (user_name) VALUES (?)''', (user,))
user_ids.append(c.lastrowid)
# Load test products
product_ids = []
for product in product_names:
values = (randint(1, 2000), catalog_ids[0], product, prices[randint(0, len(prices)-1)], 'desc')
c.execute('''INSERT INTO products (id, sku_id, catalog_id, product_name, price, description) VALUES (NULL,?,?,?,?,?)''', values)
product_ids.append(c.lastrowid)
# Load fake transactions
for i in range(1, 50):
values = (user_ids[randint(0, len(user_ids)-1)], product_ids[randint(0, len(product_ids)-1)])
c.execute('''INSERT INTO products_bought (id,user_id,product_id) VALUES (NULL,?,?)''', values)
values = (c.lastrowid,
device[randint(0, len(device) - 1)],
oses[randint(0, len(oses) - 1)],
times[randint(0, len(times) - 1)],
days[randint(0, len(days) - 1)],
lats[randint(0, len(lats) - 1)],
lons[randint(0, len(lons) - 1)],
randint(0, 3),
randint(0, 3))
c.execute('''INSERT INTO product_context (id,recommendation_id,device,os,time_of_day,day_of_week,latitude,longitude,num_items_in_cart,purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?)''', values)
# Load fake recommendations
for i in range(1, 1000):
product_id = product_ids[randint(0, len(product_ids)-1)]
values = (user_ids[randint(0, len(user_ids)-1)], product_id,)
c.execute('''INSERT INTO recommendations (id,user_id,product_id,interacted) VALUES (NULL,?,?,'True')''', values)
values =(c.lastrowid,
product_id,
device[randint(0, len(device) - 1)],
oses[randint(0, len(oses) - 1)],
times[randint(0, len(times) - 1)],
days[randint(0, len(days) - 1)],
lats[randint(0, len(lats) - 1)],
lons[randint(0, len(lons) - 1)],
randint(0, 3),
randint(0, 3))
c.execute('''INSERT INTO product_context (id,recommendation_id,product_id,device,os,time_of_day,day_of_week,latitude,longitude,num_items_in_cart,purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)''', values)
conn.commit()
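
# --- Example usage (illustrative sketch, not part of the original module) ---
# Assumes test_data provides USER_NAMES, PRODUCT_NAMES and POSSIBLE_PRICES as used in
# load_test_data above. Run only against a throwaway database: the load_* helpers
# start by deleting every row.
if __name__ == '__main__':
    conn = get_connection()
    init_database(conn)
    load_test_data(conn)
    print('users:', len(get_users(conn)))
    print('catalogs:', get_catalogs(conn))
    # Empirical P(os = 'IOS' | device = 'mobile') over the generated recommendation contexts
    print('P(IOS | mobile) =', get_probability(conn, {'device': 'mobile'}, {'os': 'IOS'}))
    conn.close()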
len(device) - 1)\n ], oses[randint(0, len(oses) - 1)], times[randint(0, len(times) -\n 1)], days[randint(0, len(days) - 1)], lats[randint(0, len(lats) -\n 1)], lons[randint(0, len(lons) - 1)], randint(0, 3), randint(0, 3)\n c.execute(\n 'INSERT INTO product_context (id,recommendation_id , product_id , device , os , time_of_day , day_of_week , latitude , longitude ,num_items_in_cart , purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)'\n , values)\n conn.commit()\n\n\noses = ['IOS', 'Android']\ndevice = ['mobile']\n<mask token>\ntimes = ['morning', 'afternoon', 'night']\ndays = ['M']\n<mask token>\nlats = ['north']\n<mask token>\nlons = ['east']\n\n\ndef get_users(conn):\n c = conn.cursor()\n c.execute('select * from users')\n return c.fetchall()\n\n\ndef get_catalogs(conn):\n c = conn.cursor()\n c.execute('select * from catalogs')\n return c.fetchall()\n\n\ndef get_products(conn, catalog_id):\n c = conn.cursor()\n c.execute('select * from products where catalog_id = ?', (catalog_id,))\n return c.fetchall()\n\n\ndef get_product_by_id(conn, catalog_id, product_id):\n c = conn.cursor()\n c.execute('SELECT * FROM products WHERE catalog_id = ? AND id = ?', (\n catalog_id, product_id))\n return c.fetchall()\n\n\ndef get_products_bought(conn, catalog_id):\n c = conn.cursor()\n c.execute(\n 'select pb.* from products_bought pb, catalogs cat, products p where pb.product_id = p.id and p.catalog_id = ?'\n , (catalog_id,))\n return c.fetchall()\n\n\ndef get_all_data(conn):\n c = conn.cursor()\n c.execute(\n 'select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id'\n )\n return c.fetchall()\n\n\ndef get_data_for_user(conn, userid):\n c = conn.cursor()\n c.execute(\n 'select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and u.id = ?'\n , (userid,))\n return c.fetchall()\n\n\ndef get_data_for_user_and_catalog(conn, userid, catalogid):\n c = conn.cursor()\n c.execute(\n 'select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and u.id = ? 
and c.id = ?'\n , (userid, catalogid))\n return c.fetchall()\n\n\ndef get_transactions_for_catalog(conn, catalogid):\n c = conn.cursor()\n c.execute(\n 'select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and c.id = ?'\n , (catalogid,))\n return c.fetchall()\n\n\ndef get_recommendations_by_user(conn, userId):\n c = conn.cursor()\n c.execute(\n 'select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.user_id = ?'\n , (userId,))\n return c.fetchall()\n\n\ndef get_recommendations_by_product(conn, productId):\n c = conn.cursor()\n c.execute(\n 'select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.product_id = ?'\n , (productId,))\n return c.fetchall()\n\n\ndef get_connection():\n return sqlite3.connect('recommendation_engine.db')\n\n\ndef generate_context(product_id):\n return [product_id, device[randint(0, len(device) - 1)], oses[randint(0,\n len(oses) - 1)], times[randint(0, len(times) - 1)], days[randint(0,\n len(days) - 1)], lats[randint(0, len(lats) - 1)], lons[randint(0, \n len(lons) - 1)], randint(0, 3), randint(0, 3)]\n\n\ndef add_recommendation(conn, product_ids, user_ids, contexts):\n ids = []\n c = conn.cursor()\n for i in range(0, len(product_ids)):\n product_id = product_ids[i]\n user_id = user_ids[i]\n context = contexts[i]\n c.execute(\n \"INSERT INTO recommendations (id,user_id, product_id, interacted) VALUES (NULL,?,?,'false')\"\n , (user_id, product_id))\n context.insert(0, c.lastrowid)\n ids.append(c.lastrowid)\n c.execute(\n 'INSERT INTO product_context (id,recommendation_id , product_id , device , os , time_of_day , day_of_week , latitude , longitude ,num_items_in_cart , purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)'\n , context)\n conn.commit()\n c.execute(\n 'select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.id in (%s)'\n % ','.join('?' 
* len(ids)), ids)\n return c.fetchall()\n\n\ndef get_probability(conn, x, giveny):\n c = conn.cursor()\n query = 'select count(*) from product_context where '\n first = True\n params = []\n for key, val in x.items():\n if not first:\n query += ' and '\n else:\n first = False\n query += str(key) + '=?'\n params.append(str(val))\n c.execute(query, params)\n total = c.fetchone()[0]\n for key, val in giveny.items():\n query += ' and ' + str(key) + '=?'\n params.append(str(val))\n c.execute(query, params)\n smaller = c.fetchone()[0]\n if total == 0:\n return 0\n else:\n return smaller / float(total)\n\n\ndef load_test_data(conn):\n c = conn.cursor()\n c.execute('DELETE FROM catalogs')\n c.execute('DELETE FROM products')\n c.execute('DELETE FROM users')\n c.execute('DELETE FROM products_bought')\n c.execute('DELETE FROM product_context')\n c.execute('DELETE FROM recommendations')\n user_names = test_data.USER_NAMES\n product_names = test_data.PRODUCT_NAMES\n prices = test_data.POSSIBLE_PRICES\n catalog_ids = []\n c.execute('INSERT INTO catalogs (catalog_name) VALUES (?)', (\n 'MovieDatabase',))\n catalog_ids.append(c.lastrowid)\n user_ids = []\n for user in user_names:\n c.execute('INSERT INTO users (user_name) VALUES (?)', (user,))\n user_ids.append(c.lastrowid)\n product_ids = []\n for product in product_names:\n values = randint(1, 2000), catalog_ids[0], product, prices[randint(\n 0, len(prices) - 1)], 'desc'\n c.execute(\n 'INSERT INTO products (id, sku_id, catalog_id, product_name, price, description) VALUES (NULL,?,?,?,?,?)'\n , values)\n product_ids.append(c.lastrowid)\n for i in range(1, 50):\n values = user_ids[randint(0, len(user_ids) - 1)], product_ids[randint\n (0, len(product_ids) - 1)]\n c.execute(\n 'INSERT INTO products_bought (id,user_id,product_id) VALUES (NULL,?,?)'\n , values)\n values = c.lastrowid, device[randint(0, len(device) - 1)], oses[randint\n (0, len(oses) - 1)], times[randint(0, len(times) - 1)], days[\n randint(0, len(days) - 1)], lats[randint(0, len(lats) - 1)], lons[\n randint(0, len(lons) - 1)], randint(0, 3), randint(0, 3)\n c.execute(\n 'INSERT INTO product_context (id,recommendation_id,device,os,time_of_day,day_of_week,latitude,longitude,num_items_in_cart,purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?)'\n , values)\n for i in range(1, 1000):\n product_id = product_ids[randint(0, len(product_ids) - 1)]\n values = user_ids[randint(0, len(user_ids) - 1)], product_id\n c.execute(\n \"INSERT INTO recommendations (id,user_id,product_id,interacted) VALUES (NULL,?,?,'True')\"\n , values)\n values = c.lastrowid, product_id, device[randint(0, len(device) - 1)\n ], oses[randint(0, len(oses) - 1)], times[randint(0, len(times) -\n 1)], days[randint(0, len(days) - 1)], lats[randint(0, len(lats) -\n 1)], lons[randint(0, len(lons) - 1)], randint(0, 3), randint(0, 3)\n c.execute(\n 'INSERT INTO product_context (id,recommendation_id,product_id,device,os,time_of_day,day_of_week,latitude,longitude,num_items_in_cart,purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)'\n , values)\n conn.commit()\n",
"step-4": "__author__ = 'tomer'\nimport sqlite3\nfrom random import randint\nimport test_data\n\n\ndef init_database(conn):\n c = conn.cursor()\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS catalogs\n (id INTEGER PRIMARY KEY AUTOINCREMENT, catalog_name TEXT)\"\"\"\n )\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS products\n (id INTEGER PRIMARY KEY AUTOINCREMENT, sku_id INTEGER, catalog_id INTEGER, product_name TEXT, price FLOAT, description TEXT)\"\"\"\n )\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS users\n (id INTEGER PRIMARY KEY AUTOINCREMENT, user_name TEXT)\"\"\"\n )\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS products_bought\n (id INTEGER PRIMARY KEY AUTOINCREMENT,user_id INTEGER,product_id INTEGER)\"\"\"\n )\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS product_context\n (id INTEGER PRIMARY KEY AUTOINCREMENT,recommendation_id INTEGER, product_id INTEGER, device TEXT, os TEXT, time_of_day TEXT, day_of_week TEXT, latitude float, longitude float,num_items_in_cart INTEGER, purchases_in_last_month INTEGER)\"\"\"\n )\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS recommendations\n (id INTEGER PRIMARY KEY AUTOINCREMENT,user_id INTEGER, product_id INTEGER, interacted BOOLEAN)\"\"\"\n )\n\n\ndef load_fake_data(conn):\n c = conn.cursor()\n c.execute('DELETE FROM catalogs')\n c.execute('DELETE FROM products')\n c.execute('DELETE FROM users')\n c.execute('DELETE FROM products_bought')\n c.execute('DELETE FROM product_context')\n c.execute('DELETE FROM recommendations')\n catalogs = []\n c.execute('INSERT INTO catalogs (catalog_name) VALUES (?)', ('BestBuy',))\n catalogs.append(c.lastrowid)\n c.execute('INSERT INTO catalogs (catalog_name) VALUES (?)', ('RiteAid',))\n catalogs.append(c.lastrowid)\n ppl = []\n c.execute('INSERT INTO users (user_name) VALUES (?)', ('Tomer',))\n ppl.append(c.lastrowid)\n c.execute('INSERT INTO users (user_name) VALUES (?)', ('Alex',))\n ppl.append(c.lastrowid)\n c.execute('INSERT INTO users (user_name) VALUES (?)', ('Matt',))\n ppl.append(c.lastrowid)\n c.execute('INSERT INTO users (user_name) VALUES (?)', ('Rachael',))\n ppl.append(c.lastrowid)\n c.execute('INSERT INTO users (user_name) VALUES (?)', ('Sam',))\n ppl.append(c.lastrowid)\n c.execute('INSERT INTO users (user_name) VALUES (?)', ('Joey',))\n ppl.append(c.lastrowid)\n products = []\n for i in range(1, 20):\n c.execute(\n 'INSERT INTO products (id,sku_id,catalog_id, product_name, price,description) VALUES (NULL,?,?,?,?,?)'\n , (randint(1, 2000), catalogs[randint(0, len(catalogs) - 1)], \n 'Movie' + str(i), randint(1, 2000), 'Title' + str(i)))\n products.append(c.lastrowid)\n for i in range(1, 50):\n c.execute(\n 'INSERT INTO products_bought (id,user_id, product_id) VALUES (NULL,?,?)'\n , (ppl[randint(0, len(ppl) - 1)], products[randint(0, len(\n products) - 1)]))\n values = c.lastrowid, device[randint(0, len(device) - 1)], oses[randint\n (0, len(oses) - 1)], times[randint(0, len(times) - 1)], days[\n randint(0, len(days) - 1)], lats[randint(0, len(lats) - 1)], lons[\n randint(0, len(lons) - 1)], randint(0, 5), randint(0, 30)\n c.execute(\n 'INSERT INTO product_context (id,recommendation_id , device , os , time_of_day , day_of_week , latitude , longitude ,num_items_in_cart , purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?)'\n , values)\n for i in range(1, 1000):\n product_id = products[randint(0, len(products) - 1)]\n c.execute(\n \"INSERT INTO recommendations (id,user_id, product_id, interacted) VALUES (NULL,?,?,'true')\"\n , (ppl[randint(0, len(ppl) - 1)], product_id))\n values = 
c.lastrowid, product_id, device[randint(0, len(device) - 1)\n ], oses[randint(0, len(oses) - 1)], times[randint(0, len(times) -\n 1)], days[randint(0, len(days) - 1)], lats[randint(0, len(lats) -\n 1)], lons[randint(0, len(lons) - 1)], randint(0, 3), randint(0, 3)\n c.execute(\n 'INSERT INTO product_context (id,recommendation_id , product_id , device , os , time_of_day , day_of_week , latitude , longitude ,num_items_in_cart , purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)'\n , values)\n conn.commit()\n\n\noses = ['IOS', 'Android']\ndevice = ['mobile']\n<mask token>\ntimes = ['morning', 'afternoon', 'night']\ndays = ['M']\n<mask token>\nlats = ['north']\n<mask token>\nlons = ['east']\n\n\ndef get_users(conn):\n c = conn.cursor()\n c.execute('select * from users')\n return c.fetchall()\n\n\ndef get_catalogs(conn):\n c = conn.cursor()\n c.execute('select * from catalogs')\n return c.fetchall()\n\n\ndef get_products(conn, catalog_id):\n c = conn.cursor()\n c.execute('select * from products where catalog_id = ?', (catalog_id,))\n return c.fetchall()\n\n\ndef get_product_by_id(conn, catalog_id, product_id):\n c = conn.cursor()\n c.execute('SELECT * FROM products WHERE catalog_id = ? AND id = ?', (\n catalog_id, product_id))\n return c.fetchall()\n\n\ndef get_products_bought(conn, catalog_id):\n c = conn.cursor()\n c.execute(\n 'select pb.* from products_bought pb, catalogs cat, products p where pb.product_id = p.id and p.catalog_id = ?'\n , (catalog_id,))\n return c.fetchall()\n\n\ndef get_all_data(conn):\n c = conn.cursor()\n c.execute(\n 'select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id'\n )\n return c.fetchall()\n\n\ndef get_data_for_user(conn, userid):\n c = conn.cursor()\n c.execute(\n 'select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and u.id = ?'\n , (userid,))\n return c.fetchall()\n\n\ndef get_data_for_user_and_catalog(conn, userid, catalogid):\n c = conn.cursor()\n c.execute(\n 'select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and u.id = ? 
and c.id = ?'\n , (userid, catalogid))\n return c.fetchall()\n\n\ndef get_transactions_for_catalog(conn, catalogid):\n c = conn.cursor()\n c.execute(\n 'select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and c.id = ?'\n , (catalogid,))\n return c.fetchall()\n\n\ndef get_recommendations_by_user(conn, userId):\n c = conn.cursor()\n c.execute(\n 'select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.user_id = ?'\n , (userId,))\n return c.fetchall()\n\n\ndef get_recommendations_by_product(conn, productId):\n c = conn.cursor()\n c.execute(\n 'select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.product_id = ?'\n , (productId,))\n return c.fetchall()\n\n\ndef get_connection():\n return sqlite3.connect('recommendation_engine.db')\n\n\ndef generate_context(product_id):\n return [product_id, device[randint(0, len(device) - 1)], oses[randint(0,\n len(oses) - 1)], times[randint(0, len(times) - 1)], days[randint(0,\n len(days) - 1)], lats[randint(0, len(lats) - 1)], lons[randint(0, \n len(lons) - 1)], randint(0, 3), randint(0, 3)]\n\n\ndef add_recommendation(conn, product_ids, user_ids, contexts):\n ids = []\n c = conn.cursor()\n for i in range(0, len(product_ids)):\n product_id = product_ids[i]\n user_id = user_ids[i]\n context = contexts[i]\n c.execute(\n \"INSERT INTO recommendations (id,user_id, product_id, interacted) VALUES (NULL,?,?,'false')\"\n , (user_id, product_id))\n context.insert(0, c.lastrowid)\n ids.append(c.lastrowid)\n c.execute(\n 'INSERT INTO product_context (id,recommendation_id , product_id , device , os , time_of_day , day_of_week , latitude , longitude ,num_items_in_cart , purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)'\n , context)\n conn.commit()\n c.execute(\n 'select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.id in (%s)'\n % ','.join('?' 
* len(ids)), ids)\n return c.fetchall()\n\n\ndef get_probability(conn, x, giveny):\n c = conn.cursor()\n query = 'select count(*) from product_context where '\n first = True\n params = []\n for key, val in x.items():\n if not first:\n query += ' and '\n else:\n first = False\n query += str(key) + '=?'\n params.append(str(val))\n c.execute(query, params)\n total = c.fetchone()[0]\n for key, val in giveny.items():\n query += ' and ' + str(key) + '=?'\n params.append(str(val))\n c.execute(query, params)\n smaller = c.fetchone()[0]\n if total == 0:\n return 0\n else:\n return smaller / float(total)\n\n\ndef load_test_data(conn):\n c = conn.cursor()\n c.execute('DELETE FROM catalogs')\n c.execute('DELETE FROM products')\n c.execute('DELETE FROM users')\n c.execute('DELETE FROM products_bought')\n c.execute('DELETE FROM product_context')\n c.execute('DELETE FROM recommendations')\n user_names = test_data.USER_NAMES\n product_names = test_data.PRODUCT_NAMES\n prices = test_data.POSSIBLE_PRICES\n catalog_ids = []\n c.execute('INSERT INTO catalogs (catalog_name) VALUES (?)', (\n 'MovieDatabase',))\n catalog_ids.append(c.lastrowid)\n user_ids = []\n for user in user_names:\n c.execute('INSERT INTO users (user_name) VALUES (?)', (user,))\n user_ids.append(c.lastrowid)\n product_ids = []\n for product in product_names:\n values = randint(1, 2000), catalog_ids[0], product, prices[randint(\n 0, len(prices) - 1)], 'desc'\n c.execute(\n 'INSERT INTO products (id, sku_id, catalog_id, product_name, price, description) VALUES (NULL,?,?,?,?,?)'\n , values)\n product_ids.append(c.lastrowid)\n for i in range(1, 50):\n values = user_ids[randint(0, len(user_ids) - 1)], product_ids[randint\n (0, len(product_ids) - 1)]\n c.execute(\n 'INSERT INTO products_bought (id,user_id,product_id) VALUES (NULL,?,?)'\n , values)\n values = c.lastrowid, device[randint(0, len(device) - 1)], oses[randint\n (0, len(oses) - 1)], times[randint(0, len(times) - 1)], days[\n randint(0, len(days) - 1)], lats[randint(0, len(lats) - 1)], lons[\n randint(0, len(lons) - 1)], randint(0, 3), randint(0, 3)\n c.execute(\n 'INSERT INTO product_context (id,recommendation_id,device,os,time_of_day,day_of_week,latitude,longitude,num_items_in_cart,purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?)'\n , values)\n for i in range(1, 1000):\n product_id = product_ids[randint(0, len(product_ids) - 1)]\n values = user_ids[randint(0, len(user_ids) - 1)], product_id\n c.execute(\n \"INSERT INTO recommendations (id,user_id,product_id,interacted) VALUES (NULL,?,?,'True')\"\n , values)\n values = c.lastrowid, product_id, device[randint(0, len(device) - 1)\n ], oses[randint(0, len(oses) - 1)], times[randint(0, len(times) -\n 1)], days[randint(0, len(days) - 1)], lats[randint(0, len(lats) -\n 1)], lons[randint(0, len(lons) - 1)], randint(0, 3), randint(0, 3)\n c.execute(\n 'INSERT INTO product_context (id,recommendation_id,product_id,device,os,time_of_day,day_of_week,latitude,longitude,num_items_in_cart,purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)'\n , values)\n conn.commit()\n",
"step-5": "__author__ = 'tomer'\nimport sqlite3\nfrom random import randint\nimport test_data\n\ndef init_database(conn):\n c = conn.cursor()\n c.execute('''CREATE TABLE IF NOT EXISTS catalogs\n (id INTEGER PRIMARY KEY AUTOINCREMENT, catalog_name TEXT)''')\n c.execute('''CREATE TABLE IF NOT EXISTS products\n (id INTEGER PRIMARY KEY AUTOINCREMENT, sku_id INTEGER, catalog_id INTEGER, product_name TEXT, price FLOAT, description TEXT)''')\n c.execute('''CREATE TABLE IF NOT EXISTS users\n (id INTEGER PRIMARY KEY AUTOINCREMENT, user_name TEXT)''')\n c.execute('''CREATE TABLE IF NOT EXISTS products_bought\n (id INTEGER PRIMARY KEY AUTOINCREMENT,user_id INTEGER,product_id INTEGER)''')\n c.execute('''CREATE TABLE IF NOT EXISTS product_context\n (id INTEGER PRIMARY KEY AUTOINCREMENT,recommendation_id INTEGER, product_id INTEGER, device TEXT, os TEXT, time_of_day TEXT, day_of_week TEXT, latitude float, longitude float,num_items_in_cart INTEGER, purchases_in_last_month INTEGER)''')\n c.execute('''CREATE TABLE IF NOT EXISTS recommendations\n (id INTEGER PRIMARY KEY AUTOINCREMENT,user_id INTEGER, product_id INTEGER, interacted BOOLEAN)''')\n\n\ndef load_fake_data(conn):\n\n c = conn.cursor()\n c.execute('''DELETE FROM catalogs''')\n c.execute('''DELETE FROM products''')\n c.execute('''DELETE FROM users''')\n c.execute('''DELETE FROM products_bought''')\n c.execute('''DELETE FROM product_context''')\n c.execute('''DELETE FROM recommendations''')\n\n catalogs = []\n c.execute('''INSERT INTO catalogs (catalog_name) VALUES (?)''',('BestBuy',))\n catalogs.append(c.lastrowid)\n c.execute('''INSERT INTO catalogs (catalog_name) VALUES (?)''',('RiteAid',))\n catalogs.append(c.lastrowid)\n\n\n ppl = []\n c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Tomer',))\n ppl.append(c.lastrowid)\n c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Alex',))\n ppl.append(c.lastrowid)\n c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Matt',))\n ppl.append(c.lastrowid)\n c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Rachael',))\n ppl.append(c.lastrowid)\n c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Sam',))\n ppl.append(c.lastrowid)\n c.execute('''INSERT INTO users (user_name) VALUES (?)''',('Joey',))\n ppl.append(c.lastrowid)\n\n products = []\n # Load fake products\n for i in range(1,20):\n c.execute('''INSERT INTO products (id,sku_id,catalog_id, product_name, price,description) VALUES (NULL,?,?,?,?,?)''',(randint(1,2000),catalogs[randint(0,len(catalogs)-1)],'Movie' + str(i),randint(1,2000),'Title' + str(i)))\n products.append(c.lastrowid)\n\n # Load fake transactions\n for i in range(1,50):\n c.execute('''INSERT INTO products_bought (id,user_id, product_id) VALUES (NULL,?,?)''',(ppl[randint(0,len(ppl)-1)],products[randint(0,len(products)-1)]))\n values = (c.lastrowid,device[randint(0,len(device)-1)],oses[randint(0,len(oses)-1)], times[randint(0,len(times)-1)], days[randint(0,len(days)-1)], lats[randint(0,len(lats)-1)], lons[randint(0,len(lons)-1)],randint(0,5),randint(0,30))\n c.execute('''INSERT INTO product_context (id,recommendation_id , device , os , time_of_day , day_of_week , latitude , longitude ,num_items_in_cart , purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?)''',values)\n\n # Load fake recommendations\n for i in range(1,1000):\n product_id = products[randint(0, len(products) - 1)]\n c.execute('''INSERT INTO recommendations (id,user_id, product_id, interacted) VALUES (NULL,?,?,'true')''',(ppl[randint(0,len(ppl)-1)],product_id))\n values = 
(c.lastrowid,product_id,device[randint(0,len(device)-1)],oses[randint(0,len(oses)-1)], times[randint(0,len(times)-1)], days[randint(0,len(days)-1)], lats[randint(0,len(lats)-1)], lons[randint(0,len(lons)-1)],randint(0,3),randint(0,3))\n c.execute('''INSERT INTO product_context (id,recommendation_id , product_id , device , os , time_of_day , day_of_week , latitude , longitude ,num_items_in_cart , purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)''',values)\n conn.commit()\n\n\noses = ['IOS', 'Android']#, 'Windows10', 'macOS']\ndevice = ['mobile']#, 'computer']\n'''\ntimes = ['10:33 AM',\n'2:38 PM',\n'3:01 AM',\n'12:31 AM',\n'2:56 PM',\n'8:01 AM',\n'5:00 PM',\n'9:38 PM',\n'3:01 AM']\n'''\ntimes = ['morning', 'afternoon', 'night']\n\ndays = ['M']#['M', 'T', 'W', 'R', 'F', 'S', 'Su']\n\n'''\nlats = ['-149.8935557',\n'-149.9054948',\n'-149.7522',\n'-149.8643361',\n'-149.8379726',\n'-149.9092788',\n'-149.7364877',\n'-149.8211',\n'-149.8445832',\n'-149.9728678']\n'''\nlats = ['north']#, 'south']\n\n'''\nlons = ['61.21759217',\n'61.19533942',\n'61.2297',\n'61.19525062',\n'61.13751355',\n'61.13994658',\n'61.19533265',\n'61.2156',\n'61.13806145',\n'61.176693']\n'''\nlons = ['east']#, 'west']\n\n\ndef get_users(conn):\n c = conn.cursor()\n c.execute('''select * from users''')\n return c.fetchall()\n\n\ndef get_catalogs(conn):\n c = conn.cursor()\n c.execute('''select * from catalogs''')\n return c.fetchall()\n\n\ndef get_products(conn, catalog_id):\n c = conn.cursor()\n c.execute('''select * from products where catalog_id = ?''',(catalog_id,))\n return c.fetchall()\n\n\ndef get_product_by_id(conn, catalog_id, product_id):\n c = conn.cursor()\n c.execute('''SELECT * FROM products WHERE catalog_id = ? AND id = ?''',(catalog_id,product_id))\n return c.fetchall()\n\n\ndef get_products_bought(conn, catalog_id):\n c = conn.cursor()\n c.execute('''select pb.* from products_bought pb, catalogs cat, products p where pb.product_id = p.id and p.catalog_id = ?''',(catalog_id,))\n return c.fetchall()\n\n\ndef get_all_data(conn):\n c = conn.cursor()\n c.execute('''select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id''')\n return c. fetchall()\n\n\ndef get_data_for_user(conn,userid):\n c = conn.cursor()\n c.execute('''select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and u.id = ?''',(userid,))\n return c.fetchall()\n\n\ndef get_data_for_user_and_catalog(conn, userid, catalogid):\n c = conn.cursor()\n c.execute('''select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and u.id = ? 
and c.id = ?''',(userid,catalogid))\n return c.fetchall()\n\n\ndef get_transactions_for_catalog(conn,catalogid):\n c = conn.cursor()\n c.execute('''select u.*, p.*, c.* from users u, products p, products_bought pb, catalogs c where p.id = pb.product_id and p.catalog_id == c.id and u.id = pb.user_id and c.id = ?''',(catalogid,))\n return c.fetchall()\n\n\ndef get_recommendations_by_user(conn,userId):\n c = conn.cursor()\n c.execute('''select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.user_id = ?''',(userId,))\n return c.fetchall()\n\n\ndef get_recommendations_by_product(conn,productId):\n c = conn.cursor()\n c.execute('''select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.product_id = ?''',(productId,))\n return c.fetchall()\n\n\ndef get_connection():\n return sqlite3.connect('recommendation_engine.db')\n\n\ndef generate_context(product_id):\n return [product_id, device[randint(0, len(device) - 1)], oses[randint(0, len(oses) - 1)],\n times[randint(0, len(times) - 1)], days[randint(0, len(days) - 1)], lats[randint(0, len(lats) - 1)],\n lons[randint(0, len(lons) - 1)], randint(0, 3), randint(0, 3)]\n\n\ndef add_recommendation(conn, product_ids,user_ids,contexts):\n ids = []\n c = conn.cursor()\n for i in range(0,len(product_ids)):\n product_id = product_ids[i]\n user_id = user_ids[i]\n context = contexts[i]\n c.execute('''INSERT INTO recommendations (id,user_id, product_id, interacted) VALUES (NULL,?,?,'false')''',\n (user_id, product_id))\n context.insert(0,c.lastrowid)\n ids.append(c.lastrowid)\n c.execute( '''INSERT INTO product_context (id,recommendation_id , product_id , device , os , time_of_day , day_of_week , latitude , longitude ,num_items_in_cart , purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)''',\n context)\n conn.commit()\n c.execute('select r.*, c.* from recommendations r, product_context c where r.id = c.recommendation_id and r.id in (%s)' %\n ','.join('?'*len(ids)), ids)\n return c.fetchall()\n\n\ndef get_probability(conn, x, giveny):\n c = conn.cursor()\n query = '''select count(*) from product_context where '''\n first = True\n params = []\n for key,val in x.items():\n if not first:\n query += ' and '\n else:\n first = False\n query += str(key) + '=?'\n params.append(str(val))\n c.execute(query,params)\n total = c.fetchone()[0]\n\n for key,val in giveny.items():\n query += ' and ' + str(key) + '=?'\n params.append(str(val))\n c.execute(query,params)\n smaller = c.fetchone()[0]\n if total == 0:\n return 0\n else:\n return smaller/float(total)\n\n\ndef load_test_data(conn):\n c = conn.cursor()\n\n # Clear database\n c.execute('''DELETE FROM catalogs''')\n c.execute('''DELETE FROM products''')\n c.execute('''DELETE FROM users''')\n c.execute('''DELETE FROM products_bought''')\n c.execute('''DELETE FROM product_context''')\n c.execute('''DELETE FROM recommendations''')\n\n # Initialize users\n user_names = test_data.USER_NAMES\n\n # Initialize movie names\n product_names = test_data.PRODUCT_NAMES\n\n # Initialize Prices\n prices = test_data.POSSIBLE_PRICES\n\n # Load test catalog\n catalog_ids = []\n c.execute('''INSERT INTO catalogs (catalog_name) VALUES (?)''', ('MovieDatabase',))\n catalog_ids.append(c.lastrowid)\n\n # Load test users\n user_ids = []\n for user in user_names:\n c.execute('''INSERT INTO users (user_name) VALUES (?)''', (user,))\n user_ids.append(c.lastrowid)\n\n # Load test products\n product_ids = []\n for product in product_names:\n values = (randint(1, 
2000), catalog_ids[0], product, prices[randint(0, len(prices)-1)], 'desc')\n c.execute('''INSERT INTO products (id, sku_id, catalog_id, product_name, price, description) VALUES (NULL,?,?,?,?,?)''', values)\n product_ids.append(c.lastrowid)\n\n # Load fake transactions\n for i in range(1, 50):\n values = (user_ids[randint(0, len(user_ids)-1)], product_ids[randint(0, len(product_ids)-1)])\n c.execute('''INSERT INTO products_bought (id,user_id,product_id) VALUES (NULL,?,?)''', values)\n\n values = (c.lastrowid,\n device[randint(0, len(device) - 1)],\n oses[randint(0, len(oses) - 1)],\n times[randint(0, len(times) - 1)],\n days[randint(0, len(days) - 1)],\n lats[randint(0, len(lats) - 1)],\n lons[randint(0, len(lons) - 1)],\n randint(0, 3),\n randint(0, 3))\n c.execute('''INSERT INTO product_context (id,recommendation_id,device,os,time_of_day,day_of_week,latitude,longitude,num_items_in_cart,purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?)''', values)\n\n # Load fake recommendations\n for i in range(1, 1000):\n product_id = product_ids[randint(0, len(product_ids)-1)]\n values = (user_ids[randint(0, len(user_ids)-1)], product_id,)\n c.execute('''INSERT INTO recommendations (id,user_id,product_id,interacted) VALUES (NULL,?,?,'True')''', values)\n\n values =(c.lastrowid,\n product_id,\n device[randint(0, len(device) - 1)],\n oses[randint(0, len(oses) - 1)],\n times[randint(0, len(times) - 1)],\n days[randint(0, len(days) - 1)],\n lats[randint(0, len(lats) - 1)],\n lons[randint(0, len(lons) - 1)],\n randint(0, 3),\n randint(0, 3))\n c.execute('''INSERT INTO product_context (id,recommendation_id,product_id,device,os,time_of_day,day_of_week,latitude,longitude,num_items_in_cart,purchases_in_last_month) VALUES (NULL,?,?,?,?,?,?,?,?,?,?)''', values)\n\n conn.commit()\n\n",
"step-ids": [
9,
17,
19,
20,
21
]
}
|
[
9,
17,
19,
20,
21
] |
from __future__ import print_function
import os
import shutil
import pymake
import flopy

# set up paths
dstpth = os.path.join('temp')
if not os.path.exists(dstpth):
    os.makedirs(dstpth)
mp6pth = os.path.join(dstpth, 'Modpath_7_1_000')
expth = os.path.join(mp6pth, 'examples')

exe_name = 'mp7'
srcpth = os.path.join(mp6pth, 'source')
target = os.path.join(dstpth, exe_name)
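# note: all of the work below happens inside the 'temp' folder created above;
# the compiled MODPATH 7 executable is written to `target` (temp/mp7 on posix systems).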
def compile_code():
    # Remove the existing Modpath_7_1_000 directory if it exists
    if os.path.isdir(mp6pth):
        shutil.rmtree(mp6pth)

    # Download the MODPATH 7 distribution
    url = "https://water.usgs.gov/ogw/modpath/Modpath_7_1_000.zip"
    pymake.download_and_unzip(url, pth=dstpth)

    # modify source files that prevent compiling with gfortran
    pth = os.path.join(srcpth, 'utl7u1.f')
    if os.path.isfile(pth):
        os.remove(pth)

    fname1 = os.path.join(srcpth, 'ModpathSubCellData.f90')
    f = open(fname1, 'r')
    fname2 = os.path.join(srcpth, 'ModpathSubCellData_mod.f90')
    f2 = open(fname2, 'w')
    for line in f:
        line = line.replace('location.', 'location%')
        f2.write(line)
    f.close()
    f2.close()
    os.remove(fname1)
    os.rename(fname2, fname1)

    fname1 = os.path.join(srcpth, 'ModpathCellData.f90')
    f = open(fname1, 'r')
    fname2 = os.path.join(srcpth, 'ModpathCellData_mod.f90')
    f2 = open(fname2, 'w')
    for line in f:
        line = line.replace('dimension(grid%GetCellCount())', 'dimension(:)')
        line = line.replace('dimension(grid%GetReducedConnectionCount())',
                            'dimension(:)')
        f2.write(line)
    f.close()
    f2.close()
    os.remove(fname1)
    os.rename(fname2, fname1)

    fname1 = os.path.join(srcpth, 'MPath7.f90')
    f = open(fname1, 'r')
    fname2 = os.path.join(srcpth, 'MPath7_mod.f90')
    f2 = open(fname2, 'w')
    for line in f:
        line = line.replace("form='binary', access='stream'",
                            "form='unformatted', access='stream'")
        f2.write(line)
    f.close()
    f2.close()
    os.remove(fname1)
    os.rename(fname2, fname1)

    # allow line lengths greater than 132 columns
    fflags = 'ffree-line-length-512'

    # make modpath 7
    pymake.main(srcpth, target, 'gfortran', 'gcc', makeclean=True,
                expedite=False, dryrun=False, double=False, debug=False,
                fflags=fflags)

    assert os.path.isfile(target), 'Target does not exist.'
def get_simfiles():
    dirs = [name for name in os.listdir(expth) if
            os.path.isdir(os.path.join(expth, name))]
    simfiles = []
    for d in dirs:
        pth = os.path.join(expth, d, 'original')
        simfiles += [os.path.join(pth, f) for f in os.listdir(pth) if
                     f.endswith('.mpsim')]
    return simfiles
def replace_files():
    dirs = [name for name in os.listdir(expth) if
            os.path.isdir(os.path.join(expth, name))]
    # rename a few files for linux
    replace_files = ['example_1.BUD', 'Zones_layer_3.txt',
                     'Retardation_layer_1.txt']
    for d in dirs:
        pth = os.path.join(expth, d, 'original')
        for rf in replace_files:
            fname1 = os.path.join(pth, rf)
            if rf in os.listdir(pth):
                fname2 = os.path.join(pth, 'temp')
                print('copy {} to {}'.format(os.path.basename(fname1),
                                             os.path.basename(fname2)))
                shutil.copy(fname1, fname2)
                print('deleting {}'.format(os.path.basename(fname1)))
                os.remove(fname1)
                fname1 = os.path.join(pth, rf.lower())
                print('rename {} to {}'.format(os.path.basename(fname2),
                                               os.path.basename(fname1)))
                os.rename(fname2, fname1)
def run_modpath7(fn):
    # run the model
    print('running model...{}'.format(fn))
    exe = os.path.abspath(target)
    fpth = os.path.basename(fn)
    model_ws = os.path.dirname(fn)
    success, buff = flopy.run_model(exe, fpth, model_ws=model_ws, silent=False)
    assert success, 'could not run...{}'.format(os.path.basename(fn))
    return
def clean_up():
    # clean up
    print('Removing folder ' + mp6pth)
    shutil.rmtree(mp6pth)
    print('Removing ' + target)
    os.remove(target)
    return
def test_compile():
    # compile MODPATH 7
    compile_code()


def test_modpath7():
    simfiles = get_simfiles()
    replace_files()
    for fn in simfiles:
        yield run_modpath7, fn


def test_clean_up():
    yield clean_up
if __name__ == "__main__":
    compile_code()
    simfiles = get_simfiles()
    replace_files()
    for fn in simfiles:
        run_modpath7(fn)
    clean_up()
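# The test_* functions above use the yield-based "test generator" pattern
# (as supported by the nose runner and older pytest versions): each yielded
# (callable, argument) pair is collected and run as a separate test case.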
|
normal
|
{
"blob_id": "ddaba7a8b53072da36224dd4618696ebf0e9a4e4",
"index": 1015,
"step-1": "<mask token>\n\n\ndef compile_code():\n if os.path.isdir(mp6pth):\n shutil.rmtree(mp6pth)\n url = 'https://water.usgs.gov/ogw/modpath/Modpath_7_1_000.zip'\n pymake.download_and_unzip(url, pth=dstpth)\n pth = os.path.join(srcpth, 'utl7u1.f')\n if os.path.isfile(pth):\n os.remove(pth)\n fname1 = os.path.join(srcpth, 'ModpathSubCellData.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'ModpathSubCellData_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace('location.', 'location%')\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fname1 = os.path.join(srcpth, 'ModpathCellData.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'ModpathCellData_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace('dimension(grid%GetCellCount())', 'dimension(:)')\n line = line.replace('dimension(grid%GetReducedConnectionCount())',\n 'dimension(:)')\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fname1 = os.path.join(srcpth, 'MPath7.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'MPath7_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace(\"form='binary', access='stream'\",\n \"form='unformatted', access='stream'\")\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fflags = 'ffree-line-length-512'\n pymake.main(srcpth, target, 'gfortran', 'gcc', makeclean=True, expedite\n =False, dryrun=False, double=False, debug=False, fflags=fflags)\n assert os.path.isfile(target), 'Target does not exist.'\n\n\n<mask token>\n\n\ndef replace_files():\n dirs = [name for name in os.listdir(expth) if os.path.isdir(os.path.\n join(expth, name))]\n replace_files = ['example_1.BUD', 'Zones_layer_3.txt',\n 'Retardation_layer_1.txt']\n for d in dirs:\n pth = os.path.join(expth, d, 'original')\n for rf in replace_files:\n fname1 = os.path.join(pth, rf)\n if rf in os.listdir(pth):\n fname2 = os.path.join(pth, 'temp')\n print('copy {} to {}'.format(os.path.basename(fname1), os.\n path.basename(fname2)))\n shutil.copy(fname1, fname2)\n print('deleting {}'.format(os.path.basename(fname1)))\n os.remove(fname1)\n fname1 = os.path.join(pth, rf.lower())\n print('rename {} to {}'.format(os.path.basename(fname2), os\n .path.basename(fname1)))\n os.rename(fname2, fname1)\n\n\ndef run_modpath7(fn):\n print('running model...{}'.format(fn))\n exe = os.path.abspath(target)\n fpth = os.path.basename(fn)\n model_ws = os.path.dirname(fn)\n success, buff = flopy.run_model(exe, fpth, model_ws=model_ws, silent=False)\n assert success, 'could not run...{}'.format(os.path.basename(fn))\n return\n\n\ndef clean_up():\n print('Removing folder ' + mp6pth)\n shutil.rmtree(mp6pth)\n print('Removing ' + target)\n os.remove(target)\n return\n\n\ndef test_compile():\n compile_code()\n\n\ndef test_modpath7():\n simfiles = get_simfiles()\n replace_files()\n for fn in simfiles:\n yield run_modpath7, fn\n\n\ndef test_clean_up():\n yield clean_up\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef compile_code():\n if os.path.isdir(mp6pth):\n shutil.rmtree(mp6pth)\n url = 'https://water.usgs.gov/ogw/modpath/Modpath_7_1_000.zip'\n pymake.download_and_unzip(url, pth=dstpth)\n pth = os.path.join(srcpth, 'utl7u1.f')\n if os.path.isfile(pth):\n os.remove(pth)\n fname1 = os.path.join(srcpth, 'ModpathSubCellData.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'ModpathSubCellData_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace('location.', 'location%')\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fname1 = os.path.join(srcpth, 'ModpathCellData.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'ModpathCellData_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace('dimension(grid%GetCellCount())', 'dimension(:)')\n line = line.replace('dimension(grid%GetReducedConnectionCount())',\n 'dimension(:)')\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fname1 = os.path.join(srcpth, 'MPath7.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'MPath7_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace(\"form='binary', access='stream'\",\n \"form='unformatted', access='stream'\")\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fflags = 'ffree-line-length-512'\n pymake.main(srcpth, target, 'gfortran', 'gcc', makeclean=True, expedite\n =False, dryrun=False, double=False, debug=False, fflags=fflags)\n assert os.path.isfile(target), 'Target does not exist.'\n\n\ndef get_simfiles():\n dirs = [name for name in os.listdir(expth) if os.path.isdir(os.path.\n join(expth, name))]\n simfiles = []\n for d in dirs:\n pth = os.path.join(expth, d, 'original')\n simfiles += [os.path.join(pth, f) for f in os.listdir(pth) if f.\n endswith('.mpsim')]\n return simfiles\n\n\ndef replace_files():\n dirs = [name for name in os.listdir(expth) if os.path.isdir(os.path.\n join(expth, name))]\n replace_files = ['example_1.BUD', 'Zones_layer_3.txt',\n 'Retardation_layer_1.txt']\n for d in dirs:\n pth = os.path.join(expth, d, 'original')\n for rf in replace_files:\n fname1 = os.path.join(pth, rf)\n if rf in os.listdir(pth):\n fname2 = os.path.join(pth, 'temp')\n print('copy {} to {}'.format(os.path.basename(fname1), os.\n path.basename(fname2)))\n shutil.copy(fname1, fname2)\n print('deleting {}'.format(os.path.basename(fname1)))\n os.remove(fname1)\n fname1 = os.path.join(pth, rf.lower())\n print('rename {} to {}'.format(os.path.basename(fname2), os\n .path.basename(fname1)))\n os.rename(fname2, fname1)\n\n\ndef run_modpath7(fn):\n print('running model...{}'.format(fn))\n exe = os.path.abspath(target)\n fpth = os.path.basename(fn)\n model_ws = os.path.dirname(fn)\n success, buff = flopy.run_model(exe, fpth, model_ws=model_ws, silent=False)\n assert success, 'could not run...{}'.format(os.path.basename(fn))\n return\n\n\ndef clean_up():\n print('Removing folder ' + mp6pth)\n shutil.rmtree(mp6pth)\n print('Removing ' + target)\n os.remove(target)\n return\n\n\ndef test_compile():\n compile_code()\n\n\ndef test_modpath7():\n simfiles = get_simfiles()\n replace_files()\n for fn in simfiles:\n yield run_modpath7, fn\n\n\ndef test_clean_up():\n yield clean_up\n\n\n<mask token>\n",
"step-3": "<mask token>\nif not os.path.exists(dstpth):\n os.makedirs(dstpth)\n<mask token>\n\n\ndef compile_code():\n if os.path.isdir(mp6pth):\n shutil.rmtree(mp6pth)\n url = 'https://water.usgs.gov/ogw/modpath/Modpath_7_1_000.zip'\n pymake.download_and_unzip(url, pth=dstpth)\n pth = os.path.join(srcpth, 'utl7u1.f')\n if os.path.isfile(pth):\n os.remove(pth)\n fname1 = os.path.join(srcpth, 'ModpathSubCellData.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'ModpathSubCellData_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace('location.', 'location%')\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fname1 = os.path.join(srcpth, 'ModpathCellData.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'ModpathCellData_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace('dimension(grid%GetCellCount())', 'dimension(:)')\n line = line.replace('dimension(grid%GetReducedConnectionCount())',\n 'dimension(:)')\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fname1 = os.path.join(srcpth, 'MPath7.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'MPath7_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace(\"form='binary', access='stream'\",\n \"form='unformatted', access='stream'\")\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fflags = 'ffree-line-length-512'\n pymake.main(srcpth, target, 'gfortran', 'gcc', makeclean=True, expedite\n =False, dryrun=False, double=False, debug=False, fflags=fflags)\n assert os.path.isfile(target), 'Target does not exist.'\n\n\ndef get_simfiles():\n dirs = [name for name in os.listdir(expth) if os.path.isdir(os.path.\n join(expth, name))]\n simfiles = []\n for d in dirs:\n pth = os.path.join(expth, d, 'original')\n simfiles += [os.path.join(pth, f) for f in os.listdir(pth) if f.\n endswith('.mpsim')]\n return simfiles\n\n\ndef replace_files():\n dirs = [name for name in os.listdir(expth) if os.path.isdir(os.path.\n join(expth, name))]\n replace_files = ['example_1.BUD', 'Zones_layer_3.txt',\n 'Retardation_layer_1.txt']\n for d in dirs:\n pth = os.path.join(expth, d, 'original')\n for rf in replace_files:\n fname1 = os.path.join(pth, rf)\n if rf in os.listdir(pth):\n fname2 = os.path.join(pth, 'temp')\n print('copy {} to {}'.format(os.path.basename(fname1), os.\n path.basename(fname2)))\n shutil.copy(fname1, fname2)\n print('deleting {}'.format(os.path.basename(fname1)))\n os.remove(fname1)\n fname1 = os.path.join(pth, rf.lower())\n print('rename {} to {}'.format(os.path.basename(fname2), os\n .path.basename(fname1)))\n os.rename(fname2, fname1)\n\n\ndef run_modpath7(fn):\n print('running model...{}'.format(fn))\n exe = os.path.abspath(target)\n fpth = os.path.basename(fn)\n model_ws = os.path.dirname(fn)\n success, buff = flopy.run_model(exe, fpth, model_ws=model_ws, silent=False)\n assert success, 'could not run...{}'.format(os.path.basename(fn))\n return\n\n\ndef clean_up():\n print('Removing folder ' + mp6pth)\n shutil.rmtree(mp6pth)\n print('Removing ' + target)\n os.remove(target)\n return\n\n\ndef test_compile():\n compile_code()\n\n\ndef test_modpath7():\n simfiles = get_simfiles()\n replace_files()\n for fn in simfiles:\n yield run_modpath7, fn\n\n\ndef test_clean_up():\n yield clean_up\n\n\nif __name__ == '__main__':\n compile_code()\n simfiles = get_simfiles()\n replace_files()\n for fn in simfiles:\n run_modpath7(fn)\n 
clean_up()\n",
"step-4": "from __future__ import print_function\nimport os\nimport shutil\nimport pymake\nimport flopy\ndstpth = os.path.join('temp')\nif not os.path.exists(dstpth):\n os.makedirs(dstpth)\nmp6pth = os.path.join(dstpth, 'Modpath_7_1_000')\nexpth = os.path.join(mp6pth, 'examples')\nexe_name = 'mp7'\nsrcpth = os.path.join(mp6pth, 'source')\ntarget = os.path.join(dstpth, exe_name)\n\n\ndef compile_code():\n if os.path.isdir(mp6pth):\n shutil.rmtree(mp6pth)\n url = 'https://water.usgs.gov/ogw/modpath/Modpath_7_1_000.zip'\n pymake.download_and_unzip(url, pth=dstpth)\n pth = os.path.join(srcpth, 'utl7u1.f')\n if os.path.isfile(pth):\n os.remove(pth)\n fname1 = os.path.join(srcpth, 'ModpathSubCellData.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'ModpathSubCellData_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace('location.', 'location%')\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fname1 = os.path.join(srcpth, 'ModpathCellData.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'ModpathCellData_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace('dimension(grid%GetCellCount())', 'dimension(:)')\n line = line.replace('dimension(grid%GetReducedConnectionCount())',\n 'dimension(:)')\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fname1 = os.path.join(srcpth, 'MPath7.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'MPath7_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace(\"form='binary', access='stream'\",\n \"form='unformatted', access='stream'\")\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n fflags = 'ffree-line-length-512'\n pymake.main(srcpth, target, 'gfortran', 'gcc', makeclean=True, expedite\n =False, dryrun=False, double=False, debug=False, fflags=fflags)\n assert os.path.isfile(target), 'Target does not exist.'\n\n\ndef get_simfiles():\n dirs = [name for name in os.listdir(expth) if os.path.isdir(os.path.\n join(expth, name))]\n simfiles = []\n for d in dirs:\n pth = os.path.join(expth, d, 'original')\n simfiles += [os.path.join(pth, f) for f in os.listdir(pth) if f.\n endswith('.mpsim')]\n return simfiles\n\n\ndef replace_files():\n dirs = [name for name in os.listdir(expth) if os.path.isdir(os.path.\n join(expth, name))]\n replace_files = ['example_1.BUD', 'Zones_layer_3.txt',\n 'Retardation_layer_1.txt']\n for d in dirs:\n pth = os.path.join(expth, d, 'original')\n for rf in replace_files:\n fname1 = os.path.join(pth, rf)\n if rf in os.listdir(pth):\n fname2 = os.path.join(pth, 'temp')\n print('copy {} to {}'.format(os.path.basename(fname1), os.\n path.basename(fname2)))\n shutil.copy(fname1, fname2)\n print('deleting {}'.format(os.path.basename(fname1)))\n os.remove(fname1)\n fname1 = os.path.join(pth, rf.lower())\n print('rename {} to {}'.format(os.path.basename(fname2), os\n .path.basename(fname1)))\n os.rename(fname2, fname1)\n\n\ndef run_modpath7(fn):\n print('running model...{}'.format(fn))\n exe = os.path.abspath(target)\n fpth = os.path.basename(fn)\n model_ws = os.path.dirname(fn)\n success, buff = flopy.run_model(exe, fpth, model_ws=model_ws, silent=False)\n assert success, 'could not run...{}'.format(os.path.basename(fn))\n return\n\n\ndef clean_up():\n print('Removing folder ' + mp6pth)\n shutil.rmtree(mp6pth)\n print('Removing ' + target)\n os.remove(target)\n return\n\n\ndef test_compile():\n compile_code()\n\n\ndef 
test_modpath7():\n simfiles = get_simfiles()\n replace_files()\n for fn in simfiles:\n yield run_modpath7, fn\n\n\ndef test_clean_up():\n yield clean_up\n\n\nif __name__ == '__main__':\n compile_code()\n simfiles = get_simfiles()\n replace_files()\n for fn in simfiles:\n run_modpath7(fn)\n clean_up()\n",
"step-5": "from __future__ import print_function\nimport os\nimport shutil\nimport pymake\nimport flopy\n\n# set up paths\ndstpth = os.path.join('temp')\nif not os.path.exists(dstpth):\n os.makedirs(dstpth)\nmp6pth = os.path.join(dstpth, 'Modpath_7_1_000')\nexpth = os.path.join(mp6pth, 'examples')\n\nexe_name = 'mp7'\nsrcpth = os.path.join(mp6pth, 'source')\ntarget = os.path.join(dstpth, exe_name)\n\n\ndef compile_code():\n # Remove the existing modpath6 directory if it exists\n if os.path.isdir(mp6pth):\n shutil.rmtree(mp6pth)\n\n # Download the MODFLOW-2005 distribution\n url = \"https://water.usgs.gov/ogw/modpath/Modpath_7_1_000.zip\"\n pymake.download_and_unzip(url, pth=dstpth)\n\n # modify source files that prevent compiling with gfortran\n pth = os.path.join(srcpth, 'utl7u1.f')\n if os.path.isfile(pth):\n os.remove(pth)\n\n fname1 = os.path.join(srcpth, 'ModpathSubCellData.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'ModpathSubCellData_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace('location.', 'location%')\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n\n fname1 = os.path.join(srcpth, 'ModpathCellData.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'ModpathCellData_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace('dimension(grid%GetCellCount())', 'dimension(:)')\n line = line.replace('dimension(grid%GetReducedConnectionCount())',\n 'dimension(:)')\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n\n fname1 = os.path.join(srcpth, 'MPath7.f90')\n f = open(fname1, 'r')\n fname2 = os.path.join(srcpth, 'MPath7_mod.f90')\n f2 = open(fname2, 'w')\n for line in f:\n line = line.replace(\"form='binary', access='stream'\",\n \"form='unformatted', access='stream'\")\n f2.write(line)\n f.close()\n f2.close()\n os.remove(fname1)\n os.rename(fname2, fname1)\n\n # allow line lengths greater than 132 columns\n fflags = 'ffree-line-length-512'\n\n # make modpath 7\n pymake.main(srcpth, target, 'gfortran', 'gcc', makeclean=True,\n expedite=False, dryrun=False, double=False, debug=False,\n fflags=fflags)\n\n assert os.path.isfile(target), 'Target does not exist.'\n\n\ndef get_simfiles():\n dirs = [name for name in os.listdir(expth) if\n os.path.isdir(os.path.join(expth, name))]\n simfiles = []\n for d in dirs:\n pth = os.path.join(expth, d, 'original')\n simfiles += [os.path.join(pth, f) for f in os.listdir(pth) if\n f.endswith('.mpsim')]\n return simfiles\n\ndef replace_files():\n dirs = [name for name in os.listdir(expth) if\n os.path.isdir(os.path.join(expth, name))]\n # rename a few files for linux\n replace_files = ['example_1.BUD', 'Zones_layer_3.txt',\n 'Retardation_layer_1.txt']\n for d in dirs:\n pth = os.path.join(expth, d, 'original')\n for rf in replace_files:\n fname1 = os.path.join(pth, rf)\n if rf in os.listdir(pth):\n fname2 = os.path.join(pth, 'temp')\n print('copy {} to {}'.format(os.path.basename(fname1),\n os.path.basename(fname2)))\n shutil.copy(fname1, fname2)\n print('deleting {}'.format(os.path.basename(fname1)))\n os.remove(fname1)\n fname1 = os.path.join(pth, rf.lower())\n print('rename {} to {}'.format(os.path.basename(fname2),\n os.path.basename(fname1)))\n os.rename(fname2, fname1)\n\ndef run_modpath7(fn):\n # run the model\n print('running model...{}'.format(fn))\n exe = os.path.abspath(target)\n fpth = os.path.basename(fn)\n model_ws = os.path.dirname(fn)\n success, buff = flopy.run_model(exe, fpth, 
model_ws=model_ws, silent=False)\n assert success, 'could not run...{}'.format(os.path.basename(fn))\n return\n\n\ndef clean_up():\n # clean up\n print('Removing folder ' + mp6pth)\n shutil.rmtree(mp6pth)\n print('Removing ' + target)\n os.remove(target)\n return\n\n\ndef test_compile():\n # compile MODPATH 7\n compile_code()\n\n\ndef test_modpath7():\n simfiles = get_simfiles()\n replace_files()\n for fn in simfiles:\n yield run_modpath7, fn\n\n\ndef test_clean_up():\n yield clean_up\n\n\nif __name__ == \"__main__\":\n compile_code()\n simfiles = get_simfiles()\n replace_files()\n for fn in simfiles:\n run_modpath7(fn)\n clean_up()\n",
"step-ids": [
7,
8,
9,
11,
12
]
}
|
[
7,
8,
9,
11,
12
] |
"""
Class for managing tables in Storage and BigQuery
"""
# pylint: disable=invalid-name, too-many-locals, too-many-branches, too-many-arguments,line-too-long,R0801,consider-using-f-string
from pathlib import Path
import json
from copy import deepcopy
import textwrap
import inspect
from io import StringIO
from loguru import logger
from google.cloud import bigquery
import ruamel.yaml as ryaml
import requests
import pandas as pd
import google.api_core.exceptions
from basedosdados.upload.base import Base
from basedosdados.upload.storage import Storage
from basedosdados.upload.dataset import Dataset
from basedosdados.upload.datatypes import Datatype
from basedosdados.upload.metadata import Metadata
from basedosdados.exceptions import BaseDosDadosException
class Table(Base):
"""
Manage tables in Google Cloud Storage and BigQuery.
"""
def __init__(self, dataset_id, table_id, **kwargs):
super().__init__(**kwargs)
self.table_id = table_id.replace("-", "_")
self.dataset_id = dataset_id.replace("-", "_")
self.dataset_folder = Path(self.metadata_path / self.dataset_id)
self.table_folder = self.dataset_folder / table_id
self.table_full_name = dict(
prod=f"{self.client['bigquery_prod'].project}.{self.dataset_id}.{self.table_id}",
staging=f"{self.client['bigquery_staging'].project}.{self.dataset_id}_staging.{self.table_id}",
)
self.table_full_name.update(dict(all=deepcopy(self.table_full_name)))
self.metadata = Metadata(self.dataset_id, self.table_id, **kwargs)
@property
def table_config(self):
"""
Load table_config.yaml
"""
return self._load_yaml(self.table_folder / "table_config.yaml")
def _get_table_obj(self, mode):
"""
Get table object from BigQuery
"""
return self.client[f"bigquery_{mode}"].get_table(self.table_full_name[mode])
def _is_partitioned(self):
"""
Check if table is partitioned
"""
## check if the table is partitioned; the split is needed because of a change in the partition type in pydantic
partitions = self.table_config["partitions"]
if partitions is None or len(partitions) == 0:
return False
if isinstance(partitions, list):
# check whether any item inside the list is None.
# False if so, e.g. [None, 'partition']
# True otherwise, e.g. ['partition1', 'partition2']
return all(item is not None for item in partitions)
raise ValueError("Partitions must be a list or None")
def _load_schema(self, mode="staging"):
"""Load schema from table_config.yaml
Args:
mode (bool): Which dataset to create [prod|staging].
"""
self._check_mode(mode)
json_path = self.table_folder / f"schema-{mode}.json"
columns = self.table_config["columns"]
if mode == "staging":
new_columns = []
for c in columns:
# if is_in_staging is None, default to True
is_in_staging = (
True if c.get("is_in_staging") is None else c["is_in_staging"]
)
# append columns declared in table_config.yaml to schema only if is_in_staging: True
if is_in_staging and not c.get("is_partition"):
c["type"] = "STRING"
new_columns.append(c)
del columns
columns = new_columns
elif mode == "prod":
schema = self._get_table_obj(mode).schema
# get field names for fields at schema and at table_config.yaml
column_names = [c["name"] for c in columns]
schema_names = [s.name for s in schema]
# check if there are mismatched fields
not_in_columns = [name for name in schema_names if name not in column_names]
not_in_schema = [name for name in column_names if name not in schema_names]
# raise if field is not in table_config
if not_in_columns:
raise BaseDosDadosException(
"Column {error_columns} was not found in table_config.yaml. Are you sure that "
"all your column names between table_config.yaml, publish.sql and "
"{project_id}.{dataset_id}.{table_id} are the same?".format(
error_columns=not_in_columns,
project_id=self.table_config["project_id_prod"],
dataset_id=self.table_config["dataset_id"],
table_id=self.table_config["table_id"],
)
)
# raise if field is not in schema
if not_in_schema:
raise BaseDosDadosException(
"Column {error_columns} was not found in publish.sql. Are you sure that "
"all your column names between table_config.yaml, publish.sql and "
"{project_id}.{dataset_id}.{table_id} are the same?".format(
error_columns=not_in_schema,
project_id=self.table_config["project_id_prod"],
dataset_id=self.table_config["dataset_id"],
table_id=self.table_config["table_id"],
)
)
# if field is in schema, get field_type and field_mode
for c in columns:
for s in schema:
if c["name"] == s.name:
c["type"] = s.field_type
c["mode"] = s.mode
break
## force utf-8, write schema_{mode}.json
json.dump(columns, (json_path).open("w", encoding="utf-8"))
# load new created schema
return self.client[f"bigquery_{mode}"].schema_from_json(str(json_path))
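# For illustration only: for a hypothetical table with columns `ano` and
# `sigla_uf` (names assumed, not taken from any real config), the generated
# schema-staging.json would contain every non-partition column with its type
# forced to STRING, roughly:
# [{"name": "ano", "type": "STRING"}, {"name": "sigla_uf", "type": "STRING"}]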
def _make_publish_sql(self):
"""Create publish.sql with columns and bigquery_type"""
### publish.sql header and instructions
publish_txt = """
/*
Query para publicar a tabela.
Esse é o lugar para:
- modificar nomes, ordem e tipos de colunas
- dar join com outras tabelas
- criar colunas extras (e.g. logs, proporções, etc.)
Qualquer coluna definida aqui deve também existir em `table_config.yaml`.
# Além disso, sinta-se à vontade para alterar alguns nomes obscuros
# para algo um pouco mais explícito.
TIPOS:
- Para modificar tipos de colunas, basta substituir STRING por outro tipo válido.
- Exemplo: `SAFE_CAST(column_name AS NUMERIC) column_name`
- Mais detalhes: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
*/
"""
# remove triple quotes extra space
publish_txt = inspect.cleandoc(publish_txt)
publish_txt = textwrap.dedent(publish_txt)
# add create table statement
project_id_prod = self.client["bigquery_prod"].project
publish_txt += f"\n\nCREATE VIEW {project_id_prod}.{self.dataset_id}.{self.table_id} AS\nSELECT \n"
# sort columns by is_partition, partitions_columns come first
if self._is_partitioned():
columns = sorted(
self.table_config["columns"],
key=lambda k: (k["is_partition"] is not None, k["is_partition"]),
reverse=True,
)
else:
columns = self.table_config["columns"]
# add columns in publish.sql
for col in columns:
name = col["name"]
bigquery_type = (
"STRING"
if col["bigquery_type"] is None
else col["bigquery_type"].upper()
)
publish_txt += f"SAFE_CAST({name} AS {bigquery_type}) {name},\n"
## remove last comma
publish_txt = publish_txt[:-2] + "\n"
# add from statement
project_id_staging = self.client["bigquery_staging"].project
publish_txt += (
f"FROM {project_id_staging}.{self.dataset_id}_staging.{self.table_id} AS t"
)
# save publish.sql in table_folder
(self.table_folder / "publish.sql").open("w", encoding="utf-8").write(
publish_txt
)
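# For illustration only: with a hypothetical prod project `basedosdados`,
# staging project `basedosdados-staging`, dataset `br_example` and table
# `municipio` whose columns are `ano` (int64) and `sigla_uf`, the body
# generated above would read roughly:
#
# CREATE VIEW basedosdados.br_example.municipio AS
# SELECT
# SAFE_CAST(ano AS INT64) ano,
# SAFE_CAST(sigla_uf AS STRING) sigla_uf
# FROM basedosdados-staging.br_example_staging.municipio AS t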
def _make_template(self, columns, partition_columns, if_table_config_exists, force_columns):
# create table_config.yaml with metadata
self.metadata.create(
if_exists=if_table_config_exists,
columns=partition_columns + columns,
partition_columns=partition_columns,
force_columns=force_columns,
table_only=False,
)
self._make_publish_sql()
@staticmethod
def _sheet_to_df(columns_config_url_or_path):
"""
Convert sheet to dataframe
"""
url = columns_config_url_or_path.replace("edit#gid=", "export?format=csv&gid=")
try:
return pd.read_csv(StringIO(requests.get(url, timeout=10).content.decode("utf-8")))
except Exception as e:
raise BaseDosDadosException(
"Check if your google sheet Share are: Anyone on the internet with this link can view"
) from e
def table_exists(self, mode):
"""Check if table exists in BigQuery.
Args:
mode (str): Which dataset to check [prod|staging].
"""
try:
ref = self._get_table_obj(mode=mode)
except google.api_core.exceptions.NotFound:
ref = None
return bool(ref)
def update_columns(self, columns_config_url_or_path=None):
"""
Fills columns in table_config.yaml automatically using a public Google Sheets URL or a local file. Also regenerates
publish.sql and autofills each column's type using bigquery_type.
The sheet must contain the columns:
- name: column name
- description: column description
- bigquery_type: column bigquery type
- measurement_unit: column measurement unit
- covered_by_dictionary: column related dictionary
- directory_column: column related directory in the format <dataset_id>.<table_id>:<column_name>
- temporal_coverage: column temporal coverage
- has_sensitive_data: the column has sensitive data
- observations: column observations
Args:
columns_config_url_or_path (str): Path to the local architecture file or a public Google Sheets URL.
Path only supports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.
Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.
"""
ruamel = ryaml.YAML()
ruamel.preserve_quotes = True
ruamel.indent(mapping=4, sequence=6, offset=4)
table_config_yaml = ruamel.load(
(self.table_folder / "table_config.yaml").open(encoding="utf-8")
)
if "https://docs.google.com/spreadsheets/d/" in columns_config_url_or_path:
if (
"edit#gid=" not in columns_config_url_or_path
or "https://docs.google.com/spreadsheets/d/"
not in columns_config_url_or_path
or not columns_config_url_or_path.split("=")[1].isdigit()
):
raise BaseDosDadosException(
"The Google sheet url not in correct format."
"The url must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>"
)
df = self._sheet_to_df(columns_config_url_or_path)
else:
file_type = columns_config_url_or_path.split(".")[-1]
if file_type == "csv":
df = pd.read_csv(columns_config_url_or_path, encoding="utf-8")
elif file_type in ["xls", "xlsx", "xlsm", "xlsb", "odf", "ods", "odt"]:
df = pd.read_excel(columns_config_url_or_path)
else:
raise BaseDosDadosException(
"File not suported. Only csv, xls, xlsx, xlsm, xlsb, odf, ods, odt are supported."
)
df = df.fillna("NULL")
required_columns = [
"name",
"bigquery_type",
"description",
"temporal_coverage",
"covered_by_dictionary",
"directory_column",
"measurement_unit",
"has_sensitive_data",
"observations",
]
not_found_columns = required_columns.copy()
for sheet_column in df.columns.tolist():
for required_column in required_columns:
if sheet_column == required_column:
not_found_columns.remove(required_column)
if not_found_columns:
raise BaseDosDadosException(
f"The following required columns are not found: {', '.join(not_found_columns)}."
)
columns_parameters = zip(
*[df[required_column].tolist() for required_column in required_columns]
)
for (
name,
bigquery_type,
description,
temporal_coverage,
covered_by_dictionary,
directory_column,
measurement_unit,
has_sensitive_data,
observations,
) in columns_parameters:
for col in table_config_yaml["columns"]:
if col["name"] == name:
col["bigquery_type"] = (
col["bigquery_type"]
if bigquery_type == "NULL"
else bigquery_type.lower()
)
col["description"] = (
col["description"] if description == "NULL" else description
)
col["temporal_coverage"] = (
col["temporal_coverage"]
if temporal_coverage == "NULL"
else [temporal_coverage]
)
col["covered_by_dictionary"] = (
"no"
if covered_by_dictionary == "NULL"
else covered_by_dictionary
)
dataset = directory_column.split(".")[0]
col["directory_column"]["dataset_id"] = (
col["directory_column"]["dataset_id"]
if dataset == "NULL"
else dataset
)
table = directory_column.split(".")[-1].split(":")[0]
col["directory_column"]["table_id"] = (
col["directory_column"]["table_id"]
if table == "NULL"
else table
)
column = directory_column.split(".")[-1].split(":")[-1]
col["directory_column"]["column_name"] = (
col["directory_column"]["column_name"]
if column == "NULL"
else column
)
col["measurement_unit"] = (
col["measurement_unit"]
if measurement_unit == "NULL"
else measurement_unit
)
col["has_sensitive_data"] = (
"no" if has_sensitive_data == "NULL" else has_sensitive_data
)
col["observations"] = (
col["observations"] if observations == "NULL" else observations
)
with open(self.table_folder / "table_config.yaml", "w", encoding="utf-8") as f:
ruamel.dump(table_config_yaml, f)
# regenerate publish.sql
self._make_publish_sql()
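# Illustrative usage sketch; the ids and the sheet URL below are assumptions,
# not part of this module:
# tb = Table(dataset_id="br_example", table_id="municipio")
# tb.update_columns(
#     "https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>"
# )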
def init(
self,
data_sample_path=None,
if_folder_exists="raise",
if_table_config_exists="raise",
source_format="csv",
force_columns = False,
columns_config_url_or_path=None,
): # sourcery skip: low-code-quality
"""Initialize table folder at metadata_path at `metadata_path/<dataset_id>/<table_id>`.
The folder should contain:
* `table_config.yaml`
* `publish.sql`
You can also point to a sample of the data to auto complete columns names.
Args:
data_sample_path (str, pathlib.PosixPath): Optional.
Data sample path to auto complete columns names
It supports Comma Delimited CSV, Apache Avro and
Apache Parquet.
if_folder_exists (str): Optional.
What to do if table folder exists
* 'raise' : Raises FileExistsError
* 'replace' : Replace folder
* 'pass' : Do nothing
if_table_config_exists (str): Optional
What to do if table_config.yaml and publish.sql exists
* 'raise' : Raises FileExistsError
* 'replace' : Replace files with blank template
* 'pass' : Do nothing
source_format (str): Optional
Data source format. Only 'csv', 'avro' and 'parquet'
are supported. Defaults to 'csv'.
force_columns (bool): Optional.
If set to `True`, overwrite CKAN's columns with the ones provided.
If set to `False`, keep CKAN's columns instead of the ones provided.
columns_config_url_or_path (str): Path to the local architecture file or a public Google Sheets URL.
Path only supports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.
Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.
Raises:
FileExistsError: If folder exists and replace is False.
NotImplementedError: If data sample is not in supported type or format.
"""
if not self.dataset_folder.exists():
raise FileExistsError(
f"Dataset folder {self.dataset_folder} folder does not exists. "
"Create a dataset before adding tables."
)
try:
self.table_folder.mkdir(exist_ok=(if_folder_exists == "replace"))
except FileExistsError as e:
if if_folder_exists == "raise":
raise FileExistsError(
f"Table folder already exists for {self.table_id}. "
) from e
if if_folder_exists == "pass":
return self
if not data_sample_path and if_table_config_exists != "pass":
raise BaseDosDadosException(
"You must provide a path to correctly create config files"
)
partition_columns = []
if isinstance(
data_sample_path,
(
str,
Path,
),
):
# Check if partitioned and get data sample and partition columns
data_sample_path = Path(data_sample_path)
if data_sample_path.is_dir():
data_sample_path = [
f
for f in data_sample_path.glob("**/*")
if f.is_file() and f.suffix == f".{source_format}"
][0]
partition_columns = [
k.split("=")[0]
for k in data_sample_path.as_posix().split("/")
if "=" in k
]
columns = Datatype(self, source_format).header(data_sample_path)
else:
columns = ["column_name"]
if if_table_config_exists == "pass":
# Check if config files exist before passing
if (
Path(self.table_folder / "table_config.yaml").is_file()
and Path(self.table_folder / "publish.sql").is_file()
):
pass
# Raise if no sample to determine columns
elif not data_sample_path:
raise BaseDosDadosException(
"You must provide a path to correctly create config files"
)
else:
self._make_template(columns, partition_columns, if_table_config_exists, force_columns=force_columns)
elif if_table_config_exists == "raise":
# Check if config files already exist
if (
Path(self.table_folder / "table_config.yaml").is_file()
and Path(self.table_folder / "publish.sql").is_file()
):
raise FileExistsError(
f"table_config.yaml and publish.sql already exists at {self.table_folder}"
)
# if config files don't exist, create them
self._make_template(columns, partition_columns, if_table_config_exists, force_columns=force_columns)
else:
# 'replace': regenerate table_config.yaml and publish.sql from the template
self._make_template(columns, partition_columns, if_table_config_exists, force_columns=force_columns)
if columns_config_url_or_path is not None:
self.update_columns(columns_config_url_or_path)
return self
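# Illustrative usage sketch; all ids and paths below are assumptions:
# tb = Table(dataset_id="br_example", table_id="municipio")
# tb.init(data_sample_path="data/municipio.csv", if_folder_exists="replace",
#         if_table_config_exists="replace")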
def create(
self,
path=None,
force_dataset=True,
if_table_exists="raise",
if_storage_data_exists="raise",
if_table_config_exists="raise",
source_format="csv",
force_columns=False,
columns_config_url_or_path=None,
dataset_is_public=True,
location=None,
chunk_size=None,
):
"""Creates BigQuery table at staging dataset.
If you add a path, it automatically saves the data in the storage,
creates a datasets folder and BigQuery location, besides creating the
table and its configuration files.
The new table should be located at `<dataset_id>_staging.<table_id>` in BigQuery.
It looks for data saved in Storage at `<bucket_name>/staging/<dataset_id>/<table_id>/*`
and builds the table.
It currently supports the types:
- Comma Delimited CSV
- Apache Avro
- Apache Parquet
Data can also be partitioned following the hive partitioning scheme
`<key1>=<value1>/<key2>=<value2>` - for instance,
`year=2012/country=BR`. The partition is automatically detected
by searching for `partitions` on the `table_config.yaml`.
Args:
path (str or pathlib.PosixPath): Where to find the file that you want to upload to create a table with
job_config_params (dict): Optional.
Job configuration params from bigquery
if_table_exists (str): Optional
What to do if table exists
* 'raise' : Raises Conflict exception
* 'replace' : Replace table
* 'pass' : Do nothing
force_dataset (bool): Creates `<dataset_id>` folder and BigQuery Dataset if it doesn't exist.
if_table_config_exists (str): Optional.
What to do if config files already exist
* 'raise': Raises FileExistsError
* 'replace': Replace with blank template
* 'pass': Do nothing
if_storage_data_exists (str): Optional.
What to do if data already exists on your bucket:
* 'raise' : Raises Conflict exception
* 'replace' : Replace table
* 'pass' : Do nothing
source_format (str): Optional
Data source format. Only 'csv', 'avro' and 'parquet'
are supported. Defaults to 'csv'.
force_columns (bool): Optional.
If set to `True`, overwrite CKAN's columns with the ones provided.
If set to `False`, keep CKAN's columns instead of the ones provided.
columns_config_url_or_path (str): Path to the local architecture file or a public Google Sheets URL.
Path only supports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.
Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.
dataset_is_public (bool): Control if prod dataset is public or not. By default staging datasets like `dataset_id_staging` are not public.
location (str): Optional. Location of dataset data.
List of possible region names locations: https://cloud.google.com/bigquery/docs/locations
chunk_size (int): Optional
The size of a chunk of data whenever iterating (in bytes).
This must be a multiple of 256 KB per the API specification.
If not specified, the chunk_size of the blob itself is used. If that is not specified, a default value of 40 MB is used.
"""
if path is None:
# Check whether table data already exists in Storage
data = self.client["storage_staging"].list_blobs(
self.bucket_name, prefix=f"staging/{self.dataset_id}/{self.table_id}"
)
# Raise: Cannot create table without external data
if not data:
raise BaseDosDadosException(
"You must provide a path for uploading data"
)
# Add data to storage
if isinstance(
path,
(
str,
Path,
),
):
Storage(self.dataset_id, self.table_id, **self.main_vars).upload(
path,
mode="staging",
if_exists=if_storage_data_exists,
chunk_size=chunk_size,
)
# Create Dataset if it doesn't exist
if force_dataset:
dataset_obj = Dataset(self.dataset_id, **self.main_vars)
try:
dataset_obj.init()
except FileExistsError:
pass
dataset_obj.create(
if_exists="pass", location=location, dataset_is_public=dataset_is_public
)
self.init(
data_sample_path=path,
if_folder_exists="replace",
if_table_config_exists=if_table_config_exists,
columns_config_url_or_path=columns_config_url_or_path,
source_format=source_format,
force_columns=force_columns
)
table = bigquery.Table(self.table_full_name["staging"])
table.external_data_configuration = Datatype(
self, source_format, "staging", partitioned=self._is_partitioned()
).external_config
# Look up whether the table already exists
table_ref = None
try:
table_ref = self.client["bigquery_staging"].get_table(
self.table_full_name["staging"]
)
except google.api_core.exceptions.NotFound:
pass
if isinstance(table_ref, google.cloud.bigquery.table.Table):
if if_table_exists == "pass":
return None
if if_table_exists == "raise":
raise FileExistsError(
"Table already exists, choose replace if you want to overwrite it"
)
if if_table_exists == "replace":
self.delete(mode="staging")
self.client["bigquery_staging"].create_table(table)
logger.success(
"{object} {object_id} was {action}!",
object_id=self.table_id,
object="Table",
action="created",
)
return None
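# Illustrative usage sketch; ids and paths are assumptions and configured
# GCP credentials are required:
# tb = Table(dataset_id="br_example", table_id="municipio")
# tb.create(
#     path="data/municipio.csv",
#     if_table_exists="replace",
#     if_storage_data_exists="replace",
#     if_table_config_exists="pass",
# )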
def update(self, mode="all"):
"""Updates BigQuery schema and description.
Args:
mode (str): Optional.
Which table to update [prod|staging|all]
not_found_ok (bool): Optional.
What to do if table is not found
"""
self._check_mode(mode)
mode = ["prod", "staging"] if mode == "all" else [mode]
for m in mode:
try:
table = self._get_table_obj(m)
except google.api_core.exceptions.NotFound:
continue
# if m == "staging":
table.description = self._render_template(
Path("table/table_description.txt"), self.table_config
)
# save table description
with open(
self.metadata_path
/ self.dataset_id
/ self.table_id
/ "table_description.txt",
"w",
encoding="utf-8",
) as f:
f.write(table.description)
# when mode is staging the table schema already exists
table.schema = self._load_schema(m)
fields = ["description", "schema"] if m == "prod" else ["description"]
self.client[f"bigquery_{m}"].update_table(table, fields=fields)
logger.success(
" {object} {object_id} was {action}!",
object_id=self.table_id,
object="Table",
action="updated",
)
def publish(self, if_exists="raise"):
"""Creates BigQuery table at production dataset.
Table should be located at `<dataset_id>.<table_id>`.
It creates a view that uses the query from
`<metadata_path>/<dataset_id>/<table_id>/publish.sql`.
Make sure that all columns from the query also exists at
`<metadata_path>/<dataset_id>/<table_id>/table_config.yaml`, including
the partitions.
Args:
if_exists (str): Optional.
What to do if table exists.
* 'raise' : Raises Conflict exception
* 'replace' : Replace table
* 'pass' : Do nothing
Todo:
* Check if all required fields are filled
"""
if if_exists == "replace":
self.delete(mode="prod")
self.client["bigquery_prod"].query(
(self.table_folder / "publish.sql").open("r", encoding="utf-8").read()
).result()
self.update()
logger.success(
" {object} {object_id} was {action}!",
object_id=self.table_id,
object="Table",
action="published",
)
def delete(self, mode):
"""Deletes table in BigQuery.
Args:
mode (str): Which table to delete [prod|staging]
"""
self._check_mode(mode)
if mode == "all":
for m, n in self.table_full_name[mode].items():
self.client[f"bigquery_{m}"].delete_table(n, not_found_ok=True)
logger.info(
" {object} {object_id}_{mode} was {action}!",
object_id=self.table_id,
mode=mode,
object="Table",
action="deleted",
)
else:
self.client[f"bigquery_{mode}"].delete_table(
self.table_full_name[mode], not_found_ok=True
)
logger.info(
" {object} {object_id}_{mode} was {action}!",
object_id=self.table_id,
mode=mode,
object="Table",
action="deleted",
)
def append(
self,
filepath,
partitions=None,
if_exists="replace",
chunk_size=None,
**upload_args,
):
"""Appends new data to existing BigQuery table.
As long as the data has the same schema, it appends the data in the
filepath to the existing table.
Args:
filepath (str or pathlib.PosixPath): Where to find the file that you want to upload to create a table with
partitions (str, pathlib.PosixPath, dict): Optional.
Hive structured partition as a string or dict
* str : `<key>=<value>/<key2>=<value2>`
* dict: `dict(key=value, key2=value2)`
if_exists (str): Optional.
What to do if data with same name exists in storage
* 'raise' : Raises Conflict exception
* 'replace' : Replace table
* 'pass' : Do nothing
chunk_size (int): Optional
The size of a chunk of data whenever iterating (in bytes).
This must be a multiple of 256 KB per the API specification.
If not specified, the chunk_size of the blob itself is used. If that is not specified, a default value of 40 MB is used.
"""
if not self.table_exists("staging"):
raise BaseDosDadosException(
"You cannot append to a table that does not exist"
)
Storage(self.dataset_id, self.table_id, **self.main_vars).upload(
filepath,
mode="staging",
partitions=partitions,
if_exists=if_exists,
chunk_size=chunk_size,
**upload_args,
)
logger.success(
" {object} {object_id} was {action}!",
object_id=self.table_id,
object="Table",
action="appended",
)
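# Illustrative end-to-end sketch of how the class above might be used. The
# dataset/table ids and file paths are assumptions, and running it requires
# configured basedosdados/GCP credentials.
if __name__ == "__main__":
    tb = Table(dataset_id="br_example", table_id="municipio")
    tb.create(
        path="data/municipio.csv",
        if_table_exists="replace",
        if_storage_data_exists="replace",
        if_table_config_exists="pass",
    )
    tb.publish(if_exists="replace")
    tb.append(filepath="data/municipio_2021.csv")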
|
normal
|
{
"blob_id": "da218e6d9ee311eefb8e9ae4dac5053793eb5514",
"index": 9369,
"step-1": "<mask token>\n\n\nclass Table(Base):\n <mask token>\n\n def __init__(self, dataset_id, table_id, **kwargs):\n super().__init__(**kwargs)\n self.table_id = table_id.replace('-', '_')\n self.dataset_id = dataset_id.replace('-', '_')\n self.dataset_folder = Path(self.metadata_path / self.dataset_id)\n self.table_folder = self.dataset_folder / table_id\n self.table_full_name = dict(prod=\n f\"{self.client['bigquery_prod'].project}.{self.dataset_id}.{self.table_id}\"\n , staging=\n f\"{self.client['bigquery_staging'].project}.{self.dataset_id}_staging.{self.table_id}\"\n )\n self.table_full_name.update(dict(all=deepcopy(self.table_full_name)))\n self.metadata = Metadata(self.dataset_id, self.table_id, **kwargs)\n\n @property\n def table_config(self):\n \"\"\"\n Load table_config.yaml\n \"\"\"\n return self._load_yaml(self.table_folder / 'table_config.yaml')\n <mask token>\n <mask token>\n\n def _load_schema(self, mode='staging'):\n \"\"\"Load schema from table_config.yaml\n\n Args:\n mode (bool): Which dataset to create [prod|staging].\n \"\"\"\n self._check_mode(mode)\n json_path = self.table_folder / f'schema-{mode}.json'\n columns = self.table_config['columns']\n if mode == 'staging':\n new_columns = []\n for c in columns:\n is_in_staging = True if c.get('is_in_staging') is None else c[\n 'is_in_staging']\n if is_in_staging and not c.get('is_partition'):\n c['type'] = 'STRING'\n new_columns.append(c)\n del columns\n columns = new_columns\n elif mode == 'prod':\n schema = self._get_table_obj(mode).schema\n column_names = [c['name'] for c in columns]\n schema_names = [s.name for s in schema]\n not_in_columns = [name for name in schema_names if name not in\n column_names]\n not_in_schema = [name for name in column_names if name not in\n schema_names]\n if not_in_columns:\n raise BaseDosDadosException(\n 'Column {error_columns} was not found in table_config.yaml. Are you sure that all your column names between table_config.yaml, publish.sql and {project_id}.{dataset_id}.{table_id} are the same?'\n .format(error_columns=not_in_columns, project_id=self.\n table_config['project_id_prod'], dataset_id=self.\n table_config['dataset_id'], table_id=self.table_config[\n 'table_id']))\n if not_in_schema:\n raise BaseDosDadosException(\n 'Column {error_columns} was not found in publish.sql. Are you sure that all your column names between table_config.yaml, publish.sql and {project_id}.{dataset_id}.{table_id} are the same?'\n .format(error_columns=not_in_schema, project_id=self.\n table_config['project_id_prod'], dataset_id=self.\n table_config['dataset_id'], table_id=self.table_config[\n 'table_id']))\n for c in columns:\n for s in schema:\n if c['name'] == s.name:\n c['type'] = s.field_type\n c['mode'] = s.mode\n break\n json.dump(columns, json_path.open('w', encoding='utf-8'))\n return self.client[f'bigquery_{mode}'].schema_from_json(str(json_path))\n\n def _make_publish_sql(self):\n \"\"\"Create publish.sql with columns and bigquery_type\"\"\"\n publish_txt = \"\"\"\n /*\n Query para publicar a tabela.\n\n Esse é o lugar para:\n - modificar nomes, ordem e tipos de colunas\n - dar join com outras tabelas\n - criar colunas extras (e.g. 
logs, proporções, etc.)\n\n Qualquer coluna definida aqui deve também existir em `table_config.yaml`.\n\n # Além disso, sinta-se à vontade para alterar alguns nomes obscuros\n # para algo um pouco mais explícito.\n\n TIPOS:\n - Para modificar tipos de colunas, basta substituir STRING por outro tipo válido.\n - Exemplo: `SAFE_CAST(column_name AS NUMERIC) column_name`\n - Mais detalhes: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types\n */\n \"\"\"\n publish_txt = inspect.cleandoc(publish_txt)\n publish_txt = textwrap.dedent(publish_txt)\n project_id_prod = self.client['bigquery_prod'].project\n publish_txt += f\"\"\"\n\nCREATE VIEW {project_id_prod}.{self.dataset_id}.{self.table_id} AS\nSELECT \n\"\"\"\n if self._is_partitioned():\n columns = sorted(self.table_config['columns'], key=lambda k: (k\n ['is_partition'] is not None, k['is_partition']), reverse=True)\n else:\n columns = self.table_config['columns']\n for col in columns:\n name = col['name']\n bigquery_type = 'STRING' if col['bigquery_type'] is None else col[\n 'bigquery_type'].upper()\n publish_txt += f'SAFE_CAST({name} AS {bigquery_type}) {name},\\n'\n publish_txt = publish_txt[:-2] + '\\n'\n project_id_staging = self.client['bigquery_staging'].project\n publish_txt += (\n f'FROM {project_id_staging}.{self.dataset_id}_staging.{self.table_id} AS t'\n )\n (self.table_folder / 'publish.sql').open('w', encoding='utf-8').write(\n publish_txt)\n <mask token>\n\n @staticmethod\n def _sheet_to_df(columns_config_url_or_path):\n \"\"\"\n Convert sheet to dataframe\n \"\"\"\n url = columns_config_url_or_path.replace('edit#gid=',\n 'export?format=csv&gid=')\n try:\n return pd.read_csv(StringIO(requests.get(url, timeout=10).\n content.decode('utf-8')))\n except Exception as e:\n raise BaseDosDadosException(\n 'Check if your google sheet Share are: Anyone on the internet with this link can view'\n ) from e\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def update(self, mode='all'):\n \"\"\"Updates BigQuery schema and description.\n Args:\n mode (str): Optional.\n Table of which table to update [prod|staging|all]\n not_found_ok (bool): Optional.\n What to do if table is not found\n \"\"\"\n self._check_mode(mode)\n mode = ['prod', 'staging'] if mode == 'all' else [mode]\n for m in mode:\n try:\n table = self._get_table_obj(m)\n except google.api_core.exceptions.NotFound:\n continue\n table.description = self._render_template(Path(\n 'table/table_description.txt'), self.table_config)\n with open(self.metadata_path / self.dataset_id / self.table_id /\n 'table_description.txt', 'w', encoding='utf-8') as f:\n f.write(table.description)\n table.schema = self._load_schema(m)\n fields = ['description', 'schema'] if m == 'prod' else [\n 'description']\n self.client[f'bigquery_{m}'].update_table(table, fields=fields)\n logger.success(' {object} {object_id} was {action}!', object_id=\n self.table_id, object='Table', action='updated')\n <mask token>\n <mask token>\n\n def append(self, filepath, partitions=None, if_exists='replace',\n chunk_size=None, **upload_args):\n \"\"\"Appends new data to existing BigQuery table.\n\n As long as the data has the same schema. 
It appends the data in the\n filepath to the existing table.\n\n Args:\n filepath (str or pathlib.PosixPath): Where to find the file that you want to upload to create a table with\n partitions (str, pathlib.PosixPath, dict): Optional.\n Hive structured partition as a string or dict\n\n * str : `<key>=<value>/<key2>=<value2>`\n * dict: `dict(key=value, key2=value2)`\n if_exists (str): 0ptional.\n What to do if data with same name exists in storage\n\n * 'raise' : Raises Conflict exception\n * 'replace' : Replace table\n * 'pass' : Do nothing\n chunk_size (int): Optional\n The size of a chunk of data whenever iterating (in bytes).\n This must be a multiple of 256 KB per the API specification.\n If not specified, the chunk_size of the blob itself is used. If that is not specified, a default value of 40 MB is used.\n \"\"\"\n if not self.table_exists('staging'):\n raise BaseDosDadosException(\n 'You cannot append to a table that does not exist')\n Storage(self.dataset_id, self.table_id, **self.main_vars).upload(\n filepath, mode='staging', partitions=partitions, if_exists=\n if_exists, chunk_size=chunk_size, **upload_args)\n logger.success(' {object} {object_id} was {action}!', object_id=\n self.table_id, object='Table', action='appended')\n",
"step-2": "<mask token>\n\n\nclass Table(Base):\n <mask token>\n\n def __init__(self, dataset_id, table_id, **kwargs):\n super().__init__(**kwargs)\n self.table_id = table_id.replace('-', '_')\n self.dataset_id = dataset_id.replace('-', '_')\n self.dataset_folder = Path(self.metadata_path / self.dataset_id)\n self.table_folder = self.dataset_folder / table_id\n self.table_full_name = dict(prod=\n f\"{self.client['bigquery_prod'].project}.{self.dataset_id}.{self.table_id}\"\n , staging=\n f\"{self.client['bigquery_staging'].project}.{self.dataset_id}_staging.{self.table_id}\"\n )\n self.table_full_name.update(dict(all=deepcopy(self.table_full_name)))\n self.metadata = Metadata(self.dataset_id, self.table_id, **kwargs)\n\n @property\n def table_config(self):\n \"\"\"\n Load table_config.yaml\n \"\"\"\n return self._load_yaml(self.table_folder / 'table_config.yaml')\n\n def _get_table_obj(self, mode):\n \"\"\"\n Get table object from BigQuery\n \"\"\"\n return self.client[f'bigquery_{mode}'].get_table(self.\n table_full_name[mode])\n\n def _is_partitioned(self):\n \"\"\"\n Check if table is partitioned\n \"\"\"\n partitions = self.table_config['partitions']\n if partitions is None or len(partitions) == 0:\n return False\n if isinstance(partitions, list):\n return all(item is not None for item in partitions)\n raise ValueError('Partitions must be a list or None')\n\n def _load_schema(self, mode='staging'):\n \"\"\"Load schema from table_config.yaml\n\n Args:\n mode (bool): Which dataset to create [prod|staging].\n \"\"\"\n self._check_mode(mode)\n json_path = self.table_folder / f'schema-{mode}.json'\n columns = self.table_config['columns']\n if mode == 'staging':\n new_columns = []\n for c in columns:\n is_in_staging = True if c.get('is_in_staging') is None else c[\n 'is_in_staging']\n if is_in_staging and not c.get('is_partition'):\n c['type'] = 'STRING'\n new_columns.append(c)\n del columns\n columns = new_columns\n elif mode == 'prod':\n schema = self._get_table_obj(mode).schema\n column_names = [c['name'] for c in columns]\n schema_names = [s.name for s in schema]\n not_in_columns = [name for name in schema_names if name not in\n column_names]\n not_in_schema = [name for name in column_names if name not in\n schema_names]\n if not_in_columns:\n raise BaseDosDadosException(\n 'Column {error_columns} was not found in table_config.yaml. Are you sure that all your column names between table_config.yaml, publish.sql and {project_id}.{dataset_id}.{table_id} are the same?'\n .format(error_columns=not_in_columns, project_id=self.\n table_config['project_id_prod'], dataset_id=self.\n table_config['dataset_id'], table_id=self.table_config[\n 'table_id']))\n if not_in_schema:\n raise BaseDosDadosException(\n 'Column {error_columns} was not found in publish.sql. 
Are you sure that all your column names between table_config.yaml, publish.sql and {project_id}.{dataset_id}.{table_id} are the same?'\n .format(error_columns=not_in_schema, project_id=self.\n table_config['project_id_prod'], dataset_id=self.\n table_config['dataset_id'], table_id=self.table_config[\n 'table_id']))\n for c in columns:\n for s in schema:\n if c['name'] == s.name:\n c['type'] = s.field_type\n c['mode'] = s.mode\n break\n json.dump(columns, json_path.open('w', encoding='utf-8'))\n return self.client[f'bigquery_{mode}'].schema_from_json(str(json_path))\n\n def _make_publish_sql(self):\n \"\"\"Create publish.sql with columns and bigquery_type\"\"\"\n publish_txt = \"\"\"\n /*\n Query para publicar a tabela.\n\n Esse é o lugar para:\n - modificar nomes, ordem e tipos de colunas\n - dar join com outras tabelas\n - criar colunas extras (e.g. logs, proporções, etc.)\n\n Qualquer coluna definida aqui deve também existir em `table_config.yaml`.\n\n # Além disso, sinta-se à vontade para alterar alguns nomes obscuros\n # para algo um pouco mais explícito.\n\n TIPOS:\n - Para modificar tipos de colunas, basta substituir STRING por outro tipo válido.\n - Exemplo: `SAFE_CAST(column_name AS NUMERIC) column_name`\n - Mais detalhes: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types\n */\n \"\"\"\n publish_txt = inspect.cleandoc(publish_txt)\n publish_txt = textwrap.dedent(publish_txt)\n project_id_prod = self.client['bigquery_prod'].project\n publish_txt += f\"\"\"\n\nCREATE VIEW {project_id_prod}.{self.dataset_id}.{self.table_id} AS\nSELECT \n\"\"\"\n if self._is_partitioned():\n columns = sorted(self.table_config['columns'], key=lambda k: (k\n ['is_partition'] is not None, k['is_partition']), reverse=True)\n else:\n columns = self.table_config['columns']\n for col in columns:\n name = col['name']\n bigquery_type = 'STRING' if col['bigquery_type'] is None else col[\n 'bigquery_type'].upper()\n publish_txt += f'SAFE_CAST({name} AS {bigquery_type}) {name},\\n'\n publish_txt = publish_txt[:-2] + '\\n'\n project_id_staging = self.client['bigquery_staging'].project\n publish_txt += (\n f'FROM {project_id_staging}.{self.dataset_id}_staging.{self.table_id} AS t'\n )\n (self.table_folder / 'publish.sql').open('w', encoding='utf-8').write(\n publish_txt)\n <mask token>\n\n @staticmethod\n def _sheet_to_df(columns_config_url_or_path):\n \"\"\"\n Convert sheet to dataframe\n \"\"\"\n url = columns_config_url_or_path.replace('edit#gid=',\n 'export?format=csv&gid=')\n try:\n return pd.read_csv(StringIO(requests.get(url, timeout=10).\n content.decode('utf-8')))\n except Exception as e:\n raise BaseDosDadosException(\n 'Check if your google sheet Share are: Anyone on the internet with this link can view'\n ) from e\n\n def table_exists(self, mode):\n \"\"\"Check if table exists in BigQuery.\n\n Args:\n mode (str): Which dataset to check [prod|staging].\n \"\"\"\n try:\n ref = self._get_table_obj(mode=mode)\n except google.api_core.exceptions.NotFound:\n ref = None\n return bool(ref)\n <mask token>\n <mask token>\n <mask token>\n\n def update(self, mode='all'):\n \"\"\"Updates BigQuery schema and description.\n Args:\n mode (str): Optional.\n Table of which table to update [prod|staging|all]\n not_found_ok (bool): Optional.\n What to do if table is not found\n \"\"\"\n self._check_mode(mode)\n mode = ['prod', 'staging'] if mode == 'all' else [mode]\n for m in mode:\n try:\n table = self._get_table_obj(m)\n except google.api_core.exceptions.NotFound:\n continue\n table.description = 
self._render_template(Path(\n 'table/table_description.txt'), self.table_config)\n with open(self.metadata_path / self.dataset_id / self.table_id /\n 'table_description.txt', 'w', encoding='utf-8') as f:\n f.write(table.description)\n table.schema = self._load_schema(m)\n fields = ['description', 'schema'] if m == 'prod' else [\n 'description']\n self.client[f'bigquery_{m}'].update_table(table, fields=fields)\n logger.success(' {object} {object_id} was {action}!', object_id=\n self.table_id, object='Table', action='updated')\n\n def publish(self, if_exists='raise'):\n \"\"\"Creates BigQuery table at production dataset.\n\n Table should be located at `<dataset_id>.<table_id>`.\n\n It creates a view that uses the query from\n `<metadata_path>/<dataset_id>/<table_id>/publish.sql`.\n\n Make sure that all columns from the query also exists at\n `<metadata_path>/<dataset_id>/<table_id>/table_config.sql`, including\n the partitions.\n\n Args:\n if_exists (str): Optional.\n What to do if table exists.\n\n * 'raise' : Raises Conflict exception\n * 'replace' : Replace table\n * 'pass' : Do nothing\n\n Todo:\n\n * Check if all required fields are filled\n \"\"\"\n if if_exists == 'replace':\n self.delete(mode='prod')\n self.client['bigquery_prod'].query((self.table_folder /\n 'publish.sql').open('r', encoding='utf-8').read()).result()\n self.update()\n logger.success(' {object} {object_id} was {action}!', object_id=\n self.table_id, object='Table', action='published')\n <mask token>\n\n def append(self, filepath, partitions=None, if_exists='replace',\n chunk_size=None, **upload_args):\n \"\"\"Appends new data to existing BigQuery table.\n\n As long as the data has the same schema. It appends the data in the\n filepath to the existing table.\n\n Args:\n filepath (str or pathlib.PosixPath): Where to find the file that you want to upload to create a table with\n partitions (str, pathlib.PosixPath, dict): Optional.\n Hive structured partition as a string or dict\n\n * str : `<key>=<value>/<key2>=<value2>`\n * dict: `dict(key=value, key2=value2)`\n if_exists (str): 0ptional.\n What to do if data with same name exists in storage\n\n * 'raise' : Raises Conflict exception\n * 'replace' : Replace table\n * 'pass' : Do nothing\n chunk_size (int): Optional\n The size of a chunk of data whenever iterating (in bytes).\n This must be a multiple of 256 KB per the API specification.\n If not specified, the chunk_size of the blob itself is used. If that is not specified, a default value of 40 MB is used.\n \"\"\"\n if not self.table_exists('staging'):\n raise BaseDosDadosException(\n 'You cannot append to a table that does not exist')\n Storage(self.dataset_id, self.table_id, **self.main_vars).upload(\n filepath, mode='staging', partitions=partitions, if_exists=\n if_exists, chunk_size=chunk_size, **upload_args)\n logger.success(' {object} {object_id} was {action}!', object_id=\n self.table_id, object='Table', action='appended')\n",
"step-3": "<mask token>\n\n\nclass Table(Base):\n <mask token>\n\n def __init__(self, dataset_id, table_id, **kwargs):\n super().__init__(**kwargs)\n self.table_id = table_id.replace('-', '_')\n self.dataset_id = dataset_id.replace('-', '_')\n self.dataset_folder = Path(self.metadata_path / self.dataset_id)\n self.table_folder = self.dataset_folder / table_id\n self.table_full_name = dict(prod=\n f\"{self.client['bigquery_prod'].project}.{self.dataset_id}.{self.table_id}\"\n , staging=\n f\"{self.client['bigquery_staging'].project}.{self.dataset_id}_staging.{self.table_id}\"\n )\n self.table_full_name.update(dict(all=deepcopy(self.table_full_name)))\n self.metadata = Metadata(self.dataset_id, self.table_id, **kwargs)\n\n @property\n def table_config(self):\n \"\"\"\n Load table_config.yaml\n \"\"\"\n return self._load_yaml(self.table_folder / 'table_config.yaml')\n\n def _get_table_obj(self, mode):\n \"\"\"\n Get table object from BigQuery\n \"\"\"\n return self.client[f'bigquery_{mode}'].get_table(self.\n table_full_name[mode])\n\n def _is_partitioned(self):\n \"\"\"\n Check if table is partitioned\n \"\"\"\n partitions = self.table_config['partitions']\n if partitions is None or len(partitions) == 0:\n return False\n if isinstance(partitions, list):\n return all(item is not None for item in partitions)\n raise ValueError('Partitions must be a list or None')\n\n def _load_schema(self, mode='staging'):\n \"\"\"Load schema from table_config.yaml\n\n Args:\n mode (bool): Which dataset to create [prod|staging].\n \"\"\"\n self._check_mode(mode)\n json_path = self.table_folder / f'schema-{mode}.json'\n columns = self.table_config['columns']\n if mode == 'staging':\n new_columns = []\n for c in columns:\n is_in_staging = True if c.get('is_in_staging') is None else c[\n 'is_in_staging']\n if is_in_staging and not c.get('is_partition'):\n c['type'] = 'STRING'\n new_columns.append(c)\n del columns\n columns = new_columns\n elif mode == 'prod':\n schema = self._get_table_obj(mode).schema\n column_names = [c['name'] for c in columns]\n schema_names = [s.name for s in schema]\n not_in_columns = [name for name in schema_names if name not in\n column_names]\n not_in_schema = [name for name in column_names if name not in\n schema_names]\n if not_in_columns:\n raise BaseDosDadosException(\n 'Column {error_columns} was not found in table_config.yaml. Are you sure that all your column names between table_config.yaml, publish.sql and {project_id}.{dataset_id}.{table_id} are the same?'\n .format(error_columns=not_in_columns, project_id=self.\n table_config['project_id_prod'], dataset_id=self.\n table_config['dataset_id'], table_id=self.table_config[\n 'table_id']))\n if not_in_schema:\n raise BaseDosDadosException(\n 'Column {error_columns} was not found in publish.sql. 
Are you sure that all your column names between table_config.yaml, publish.sql and {project_id}.{dataset_id}.{table_id} are the same?'\n .format(error_columns=not_in_schema, project_id=self.\n table_config['project_id_prod'], dataset_id=self.\n table_config['dataset_id'], table_id=self.table_config[\n 'table_id']))\n for c in columns:\n for s in schema:\n if c['name'] == s.name:\n c['type'] = s.field_type\n c['mode'] = s.mode\n break\n json.dump(columns, json_path.open('w', encoding='utf-8'))\n return self.client[f'bigquery_{mode}'].schema_from_json(str(json_path))\n\n def _make_publish_sql(self):\n \"\"\"Create publish.sql with columns and bigquery_type\"\"\"\n publish_txt = \"\"\"\n /*\n Query para publicar a tabela.\n\n Esse é o lugar para:\n - modificar nomes, ordem e tipos de colunas\n - dar join com outras tabelas\n - criar colunas extras (e.g. logs, proporções, etc.)\n\n Qualquer coluna definida aqui deve também existir em `table_config.yaml`.\n\n # Além disso, sinta-se à vontade para alterar alguns nomes obscuros\n # para algo um pouco mais explícito.\n\n TIPOS:\n - Para modificar tipos de colunas, basta substituir STRING por outro tipo válido.\n - Exemplo: `SAFE_CAST(column_name AS NUMERIC) column_name`\n - Mais detalhes: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types\n */\n \"\"\"\n publish_txt = inspect.cleandoc(publish_txt)\n publish_txt = textwrap.dedent(publish_txt)\n project_id_prod = self.client['bigquery_prod'].project\n publish_txt += f\"\"\"\n\nCREATE VIEW {project_id_prod}.{self.dataset_id}.{self.table_id} AS\nSELECT \n\"\"\"\n if self._is_partitioned():\n columns = sorted(self.table_config['columns'], key=lambda k: (k\n ['is_partition'] is not None, k['is_partition']), reverse=True)\n else:\n columns = self.table_config['columns']\n for col in columns:\n name = col['name']\n bigquery_type = 'STRING' if col['bigquery_type'] is None else col[\n 'bigquery_type'].upper()\n publish_txt += f'SAFE_CAST({name} AS {bigquery_type}) {name},\\n'\n publish_txt = publish_txt[:-2] + '\\n'\n project_id_staging = self.client['bigquery_staging'].project\n publish_txt += (\n f'FROM {project_id_staging}.{self.dataset_id}_staging.{self.table_id} AS t'\n )\n (self.table_folder / 'publish.sql').open('w', encoding='utf-8').write(\n publish_txt)\n\n def _make_template(self, columns, partition_columns,\n if_table_config_exists, force_columns):\n self.metadata.create(if_exists=if_table_config_exists, columns=\n partition_columns + columns, partition_columns=\n partition_columns, force_columns=force_columns, table_only=False)\n self._make_publish_sql()\n\n @staticmethod\n def _sheet_to_df(columns_config_url_or_path):\n \"\"\"\n Convert sheet to dataframe\n \"\"\"\n url = columns_config_url_or_path.replace('edit#gid=',\n 'export?format=csv&gid=')\n try:\n return pd.read_csv(StringIO(requests.get(url, timeout=10).\n content.decode('utf-8')))\n except Exception as e:\n raise BaseDosDadosException(\n 'Check if your google sheet Share are: Anyone on the internet with this link can view'\n ) from e\n\n def table_exists(self, mode):\n \"\"\"Check if table exists in BigQuery.\n\n Args:\n mode (str): Which dataset to check [prod|staging].\n \"\"\"\n try:\n ref = self._get_table_obj(mode=mode)\n except google.api_core.exceptions.NotFound:\n ref = None\n return bool(ref)\n\n def update_columns(self, columns_config_url_or_path=None):\n \"\"\"\n Fills columns in table_config.yaml automatically using a public google sheets URL or a local file. 
Also regenerate\n publish.sql and autofill type using bigquery_type.\n\n The sheet must contain the columns:\n - name: column name\n - description: column description\n - bigquery_type: column bigquery type\n - measurement_unit: column mesurement unit\n - covered_by_dictionary: column related dictionary\n - directory_column: column related directory in the format <dataset_id>.<table_id>:<column_name>\n - temporal_coverage: column temporal coverage\n - has_sensitive_data: the column has sensitive data\n - observations: column observations\n Args:\n columns_config_url_or_path (str): Path to the local architeture file or a public google sheets URL.\n Path only suports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.\n Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.\n\n \"\"\"\n ruamel = ryaml.YAML()\n ruamel.preserve_quotes = True\n ruamel.indent(mapping=4, sequence=6, offset=4)\n table_config_yaml = ruamel.load((self.table_folder /\n 'table_config.yaml').open(encoding='utf-8'))\n if ('https://docs.google.com/spreadsheets/d/' in\n columns_config_url_or_path):\n if ('edit#gid=' not in columns_config_url_or_path or \n 'https://docs.google.com/spreadsheets/d/' not in\n columns_config_url_or_path or not\n columns_config_url_or_path.split('=')[1].isdigit()):\n raise BaseDosDadosException(\n 'The Google sheet url not in correct format.The url must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>'\n )\n df = self._sheet_to_df(columns_config_url_or_path)\n else:\n file_type = columns_config_url_or_path.split('.')[-1]\n if file_type == 'csv':\n df = pd.read_csv(columns_config_url_or_path, encoding='utf-8')\n elif file_type in ['xls', 'xlsx', 'xlsm', 'xlsb', 'odf', 'ods',\n 'odt']:\n df = pd.read_excel(columns_config_url_or_path)\n else:\n raise BaseDosDadosException(\n 'File not suported. 
Only csv, xls, xlsx, xlsm, xlsb, odf, ods, odt are supported.'\n )\n df = df.fillna('NULL')\n required_columns = ['name', 'bigquery_type', 'description',\n 'temporal_coverage', 'covered_by_dictionary',\n 'directory_column', 'measurement_unit', 'has_sensitive_data',\n 'observations']\n not_found_columns = required_columns.copy()\n for sheet_column in df.columns.tolist():\n for required_column in required_columns:\n if sheet_column == required_column:\n not_found_columns.remove(required_column)\n if not_found_columns:\n raise BaseDosDadosException(\n f\"The following required columns are not found: {', '.join(not_found_columns)}.\"\n )\n columns_parameters = zip(*[df[required_column].tolist() for\n required_column in required_columns])\n for name, bigquery_type, description, temporal_coverage, covered_by_dictionary, directory_column, measurement_unit, has_sensitive_data, observations in columns_parameters:\n for col in table_config_yaml['columns']:\n if col['name'] == name:\n col['bigquery_type'] = col['bigquery_type'\n ] if bigquery_type == 'NULL' else bigquery_type.lower()\n col['description'] = col['description'\n ] if description == 'NULL' else description\n col['temporal_coverage'] = col['temporal_coverage'\n ] if temporal_coverage == 'NULL' else [\n temporal_coverage]\n col['covered_by_dictionary'] = ('no' if \n covered_by_dictionary == 'NULL' else\n covered_by_dictionary)\n dataset = directory_column.split('.')[0]\n col['directory_column']['dataset_id'] = col[\n 'directory_column']['dataset_id'\n ] if dataset == 'NULL' else dataset\n table = directory_column.split('.')[-1].split(':')[0]\n col['directory_column']['table_id'] = col[\n 'directory_column']['table_id'\n ] if table == 'NULL' else table\n column = directory_column.split('.')[-1].split(':')[-1]\n col['directory_column']['column_name'] = col[\n 'directory_column']['column_name'\n ] if column == 'NULL' else column\n col['measurement_unit'] = col['measurement_unit'\n ] if measurement_unit == 'NULL' else measurement_unit\n col['has_sensitive_data'] = ('no' if has_sensitive_data ==\n 'NULL' else has_sensitive_data)\n col['observations'] = col['observations'\n ] if observations == 'NULL' else observations\n with open(self.table_folder / 'table_config.yaml', 'w', encoding=\n 'utf-8') as f:\n ruamel.dump(table_config_yaml, f)\n self._make_publish_sql()\n\n def init(self, data_sample_path=None, if_folder_exists='raise',\n if_table_config_exists='raise', source_format='csv', force_columns=\n False, columns_config_url_or_path=None):\n \"\"\"Initialize table folder at metadata_path at `metadata_path/<dataset_id>/<table_id>`.\n\n The folder should contain:\n\n * `table_config.yaml`\n * `publish.sql`\n\n You can also point to a sample of the data to auto complete columns names.\n\n Args:\n data_sample_path (str, pathlib.PosixPath): Optional.\n Data sample path to auto complete columns names\n It supports Comma Delimited CSV, Apache Avro and\n Apache Parquet.\n if_folder_exists (str): Optional.\n What to do if table folder exists\n\n * 'raise' : Raises FileExistsError\n * 'replace' : Replace folder\n * 'pass' : Do nothing\n if_table_config_exists (str): Optional\n What to do if table_config.yaml and publish.sql exists\n\n * 'raise' : Raises FileExistsError\n * 'replace' : Replace files with blank template\n * 'pass' : Do nothing\n source_format (str): Optional\n Data source format. Only 'csv', 'avro' and 'parquet'\n are supported. 
Defaults to 'csv'.\n force_columns (bool): Optional.\n If set to `True`, overwrite CKAN's columns with the ones provi\n ded.\n If set to `False`, keep CKAN's columns instead of the ones pro\n vided.\n columns_config_url_or_path (str): Path to the local architeture file or a public google sheets URL.\n Path only suports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.\n Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.\n\n Raises:\n FileExistsError: If folder exists and replace is False.\n NotImplementedError: If data sample is not in supported type or format.\n \"\"\"\n if not self.dataset_folder.exists():\n raise FileExistsError(\n f'Dataset folder {self.dataset_folder} folder does not exists. Create a dataset before adding tables.'\n )\n try:\n self.table_folder.mkdir(exist_ok=if_folder_exists == 'replace')\n except FileExistsError as e:\n if if_folder_exists == 'raise':\n raise FileExistsError(\n f'Table folder already exists for {self.table_id}. '\n ) from e\n if if_folder_exists == 'pass':\n return self\n if not data_sample_path and if_table_config_exists != 'pass':\n raise BaseDosDadosException(\n 'You must provide a path to correctly create config files')\n partition_columns = []\n if isinstance(data_sample_path, (str, Path)):\n data_sample_path = Path(data_sample_path)\n if data_sample_path.is_dir():\n data_sample_path = [f for f in data_sample_path.glob('**/*'\n ) if f.is_file() and f.suffix == f'.{source_format}'][0]\n partition_columns = [k.split('=')[0] for k in\n data_sample_path.as_posix().split('/') if '=' in k]\n columns = Datatype(self, source_format).header(data_sample_path)\n else:\n columns = ['column_name']\n if if_table_config_exists == 'pass':\n if Path(self.table_folder / 'table_config.yaml').is_file(\n ) and Path(self.table_folder / 'publish.sql').is_file():\n pass\n elif not data_sample_path:\n raise BaseDosDadosException(\n 'You must provide a path to correctly create config files')\n else:\n self._make_template(columns, partition_columns,\n if_table_config_exists, force_columns=force_columns)\n elif if_table_config_exists == 'raise':\n if Path(self.table_folder / 'table_config.yaml').is_file(\n ) and Path(self.table_folder / 'publish.sql').is_file():\n raise FileExistsError(\n f'table_config.yaml and publish.sql already exists at {self.table_folder}'\n )\n self._make_template(columns, partition_columns,\n if_table_config_exists, force_columns=force_columns)\n else:\n self._make_template(columns, partition_columns,\n if_table_config_exists, force_columns=force_columns)\n if columns_config_url_or_path is not None:\n self.update_columns(columns_config_url_or_path)\n return self\n <mask token>\n\n def update(self, mode='all'):\n \"\"\"Updates BigQuery schema and description.\n Args:\n mode (str): Optional.\n Table of which table to update [prod|staging|all]\n not_found_ok (bool): Optional.\n What to do if table is not found\n \"\"\"\n self._check_mode(mode)\n mode = ['prod', 'staging'] if mode == 'all' else [mode]\n for m in mode:\n try:\n table = self._get_table_obj(m)\n except google.api_core.exceptions.NotFound:\n continue\n table.description = self._render_template(Path(\n 'table/table_description.txt'), self.table_config)\n with open(self.metadata_path / self.dataset_id / self.table_id /\n 'table_description.txt', 'w', encoding='utf-8') as f:\n f.write(table.description)\n table.schema = self._load_schema(m)\n fields = ['description', 'schema'] if m == 'prod' else [\n 'description']\n 
self.client[f'bigquery_{m}'].update_table(table, fields=fields)\n logger.success(' {object} {object_id} was {action}!', object_id=\n self.table_id, object='Table', action='updated')\n\n def publish(self, if_exists='raise'):\n \"\"\"Creates BigQuery table at production dataset.\n\n Table should be located at `<dataset_id>.<table_id>`.\n\n It creates a view that uses the query from\n `<metadata_path>/<dataset_id>/<table_id>/publish.sql`.\n\n Make sure that all columns from the query also exists at\n `<metadata_path>/<dataset_id>/<table_id>/table_config.sql`, including\n the partitions.\n\n Args:\n if_exists (str): Optional.\n What to do if table exists.\n\n * 'raise' : Raises Conflict exception\n * 'replace' : Replace table\n * 'pass' : Do nothing\n\n Todo:\n\n * Check if all required fields are filled\n \"\"\"\n if if_exists == 'replace':\n self.delete(mode='prod')\n self.client['bigquery_prod'].query((self.table_folder /\n 'publish.sql').open('r', encoding='utf-8').read()).result()\n self.update()\n logger.success(' {object} {object_id} was {action}!', object_id=\n self.table_id, object='Table', action='published')\n <mask token>\n\n def append(self, filepath, partitions=None, if_exists='replace',\n chunk_size=None, **upload_args):\n \"\"\"Appends new data to existing BigQuery table.\n\n As long as the data has the same schema. It appends the data in the\n filepath to the existing table.\n\n Args:\n filepath (str or pathlib.PosixPath): Where to find the file that you want to upload to create a table with\n partitions (str, pathlib.PosixPath, dict): Optional.\n Hive structured partition as a string or dict\n\n * str : `<key>=<value>/<key2>=<value2>`\n * dict: `dict(key=value, key2=value2)`\n if_exists (str): 0ptional.\n What to do if data with same name exists in storage\n\n * 'raise' : Raises Conflict exception\n * 'replace' : Replace table\n * 'pass' : Do nothing\n chunk_size (int): Optional\n The size of a chunk of data whenever iterating (in bytes).\n This must be a multiple of 256 KB per the API specification.\n If not specified, the chunk_size of the blob itself is used. If that is not specified, a default value of 40 MB is used.\n \"\"\"\n if not self.table_exists('staging'):\n raise BaseDosDadosException(\n 'You cannot append to a table that does not exist')\n Storage(self.dataset_id, self.table_id, **self.main_vars).upload(\n filepath, mode='staging', partitions=partitions, if_exists=\n if_exists, chunk_size=chunk_size, **upload_args)\n logger.success(' {object} {object_id} was {action}!', object_id=\n self.table_id, object='Table', action='appended')\n",
"step-4": "<mask token>\n\n\nclass Table(Base):\n <mask token>\n\n def __init__(self, dataset_id, table_id, **kwargs):\n super().__init__(**kwargs)\n self.table_id = table_id.replace('-', '_')\n self.dataset_id = dataset_id.replace('-', '_')\n self.dataset_folder = Path(self.metadata_path / self.dataset_id)\n self.table_folder = self.dataset_folder / table_id\n self.table_full_name = dict(prod=\n f\"{self.client['bigquery_prod'].project}.{self.dataset_id}.{self.table_id}\"\n , staging=\n f\"{self.client['bigquery_staging'].project}.{self.dataset_id}_staging.{self.table_id}\"\n )\n self.table_full_name.update(dict(all=deepcopy(self.table_full_name)))\n self.metadata = Metadata(self.dataset_id, self.table_id, **kwargs)\n\n @property\n def table_config(self):\n \"\"\"\n Load table_config.yaml\n \"\"\"\n return self._load_yaml(self.table_folder / 'table_config.yaml')\n\n def _get_table_obj(self, mode):\n \"\"\"\n Get table object from BigQuery\n \"\"\"\n return self.client[f'bigquery_{mode}'].get_table(self.\n table_full_name[mode])\n\n def _is_partitioned(self):\n \"\"\"\n Check if table is partitioned\n \"\"\"\n partitions = self.table_config['partitions']\n if partitions is None or len(partitions) == 0:\n return False\n if isinstance(partitions, list):\n return all(item is not None for item in partitions)\n raise ValueError('Partitions must be a list or None')\n\n def _load_schema(self, mode='staging'):\n \"\"\"Load schema from table_config.yaml\n\n Args:\n mode (bool): Which dataset to create [prod|staging].\n \"\"\"\n self._check_mode(mode)\n json_path = self.table_folder / f'schema-{mode}.json'\n columns = self.table_config['columns']\n if mode == 'staging':\n new_columns = []\n for c in columns:\n is_in_staging = True if c.get('is_in_staging') is None else c[\n 'is_in_staging']\n if is_in_staging and not c.get('is_partition'):\n c['type'] = 'STRING'\n new_columns.append(c)\n del columns\n columns = new_columns\n elif mode == 'prod':\n schema = self._get_table_obj(mode).schema\n column_names = [c['name'] for c in columns]\n schema_names = [s.name for s in schema]\n not_in_columns = [name for name in schema_names if name not in\n column_names]\n not_in_schema = [name for name in column_names if name not in\n schema_names]\n if not_in_columns:\n raise BaseDosDadosException(\n 'Column {error_columns} was not found in table_config.yaml. Are you sure that all your column names between table_config.yaml, publish.sql and {project_id}.{dataset_id}.{table_id} are the same?'\n .format(error_columns=not_in_columns, project_id=self.\n table_config['project_id_prod'], dataset_id=self.\n table_config['dataset_id'], table_id=self.table_config[\n 'table_id']))\n if not_in_schema:\n raise BaseDosDadosException(\n 'Column {error_columns} was not found in publish.sql. 
Are you sure that all your column names between table_config.yaml, publish.sql and {project_id}.{dataset_id}.{table_id} are the same?'\n .format(error_columns=not_in_schema, project_id=self.\n table_config['project_id_prod'], dataset_id=self.\n table_config['dataset_id'], table_id=self.table_config[\n 'table_id']))\n for c in columns:\n for s in schema:\n if c['name'] == s.name:\n c['type'] = s.field_type\n c['mode'] = s.mode\n break\n json.dump(columns, json_path.open('w', encoding='utf-8'))\n return self.client[f'bigquery_{mode}'].schema_from_json(str(json_path))\n\n def _make_publish_sql(self):\n \"\"\"Create publish.sql with columns and bigquery_type\"\"\"\n publish_txt = \"\"\"\n /*\n Query para publicar a tabela.\n\n Esse é o lugar para:\n - modificar nomes, ordem e tipos de colunas\n - dar join com outras tabelas\n - criar colunas extras (e.g. logs, proporções, etc.)\n\n Qualquer coluna definida aqui deve também existir em `table_config.yaml`.\n\n # Além disso, sinta-se à vontade para alterar alguns nomes obscuros\n # para algo um pouco mais explícito.\n\n TIPOS:\n - Para modificar tipos de colunas, basta substituir STRING por outro tipo válido.\n - Exemplo: `SAFE_CAST(column_name AS NUMERIC) column_name`\n - Mais detalhes: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types\n */\n \"\"\"\n publish_txt = inspect.cleandoc(publish_txt)\n publish_txt = textwrap.dedent(publish_txt)\n project_id_prod = self.client['bigquery_prod'].project\n publish_txt += f\"\"\"\n\nCREATE VIEW {project_id_prod}.{self.dataset_id}.{self.table_id} AS\nSELECT \n\"\"\"\n if self._is_partitioned():\n columns = sorted(self.table_config['columns'], key=lambda k: (k\n ['is_partition'] is not None, k['is_partition']), reverse=True)\n else:\n columns = self.table_config['columns']\n for col in columns:\n name = col['name']\n bigquery_type = 'STRING' if col['bigquery_type'] is None else col[\n 'bigquery_type'].upper()\n publish_txt += f'SAFE_CAST({name} AS {bigquery_type}) {name},\\n'\n publish_txt = publish_txt[:-2] + '\\n'\n project_id_staging = self.client['bigquery_staging'].project\n publish_txt += (\n f'FROM {project_id_staging}.{self.dataset_id}_staging.{self.table_id} AS t'\n )\n (self.table_folder / 'publish.sql').open('w', encoding='utf-8').write(\n publish_txt)\n\n def _make_template(self, columns, partition_columns,\n if_table_config_exists, force_columns):\n self.metadata.create(if_exists=if_table_config_exists, columns=\n partition_columns + columns, partition_columns=\n partition_columns, force_columns=force_columns, table_only=False)\n self._make_publish_sql()\n\n @staticmethod\n def _sheet_to_df(columns_config_url_or_path):\n \"\"\"\n Convert sheet to dataframe\n \"\"\"\n url = columns_config_url_or_path.replace('edit#gid=',\n 'export?format=csv&gid=')\n try:\n return pd.read_csv(StringIO(requests.get(url, timeout=10).\n content.decode('utf-8')))\n except Exception as e:\n raise BaseDosDadosException(\n 'Check if your google sheet Share are: Anyone on the internet with this link can view'\n ) from e\n\n def table_exists(self, mode):\n \"\"\"Check if table exists in BigQuery.\n\n Args:\n mode (str): Which dataset to check [prod|staging].\n \"\"\"\n try:\n ref = self._get_table_obj(mode=mode)\n except google.api_core.exceptions.NotFound:\n ref = None\n return bool(ref)\n\n def update_columns(self, columns_config_url_or_path=None):\n \"\"\"\n Fills columns in table_config.yaml automatically using a public google sheets URL or a local file. 
Also regenerate\n publish.sql and autofill type using bigquery_type.\n\n The sheet must contain the columns:\n - name: column name\n - description: column description\n - bigquery_type: column bigquery type\n - measurement_unit: column mesurement unit\n - covered_by_dictionary: column related dictionary\n - directory_column: column related directory in the format <dataset_id>.<table_id>:<column_name>\n - temporal_coverage: column temporal coverage\n - has_sensitive_data: the column has sensitive data\n - observations: column observations\n Args:\n columns_config_url_or_path (str): Path to the local architeture file or a public google sheets URL.\n Path only suports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.\n Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.\n\n \"\"\"\n ruamel = ryaml.YAML()\n ruamel.preserve_quotes = True\n ruamel.indent(mapping=4, sequence=6, offset=4)\n table_config_yaml = ruamel.load((self.table_folder /\n 'table_config.yaml').open(encoding='utf-8'))\n if ('https://docs.google.com/spreadsheets/d/' in\n columns_config_url_or_path):\n if ('edit#gid=' not in columns_config_url_or_path or \n 'https://docs.google.com/spreadsheets/d/' not in\n columns_config_url_or_path or not\n columns_config_url_or_path.split('=')[1].isdigit()):\n raise BaseDosDadosException(\n 'The Google sheet url not in correct format.The url must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>'\n )\n df = self._sheet_to_df(columns_config_url_or_path)\n else:\n file_type = columns_config_url_or_path.split('.')[-1]\n if file_type == 'csv':\n df = pd.read_csv(columns_config_url_or_path, encoding='utf-8')\n elif file_type in ['xls', 'xlsx', 'xlsm', 'xlsb', 'odf', 'ods',\n 'odt']:\n df = pd.read_excel(columns_config_url_or_path)\n else:\n raise BaseDosDadosException(\n 'File not suported. 
Only csv, xls, xlsx, xlsm, xlsb, odf, ods, odt are supported.'\n )\n df = df.fillna('NULL')\n required_columns = ['name', 'bigquery_type', 'description',\n 'temporal_coverage', 'covered_by_dictionary',\n 'directory_column', 'measurement_unit', 'has_sensitive_data',\n 'observations']\n not_found_columns = required_columns.copy()\n for sheet_column in df.columns.tolist():\n for required_column in required_columns:\n if sheet_column == required_column:\n not_found_columns.remove(required_column)\n if not_found_columns:\n raise BaseDosDadosException(\n f\"The following required columns are not found: {', '.join(not_found_columns)}.\"\n )\n columns_parameters = zip(*[df[required_column].tolist() for\n required_column in required_columns])\n for name, bigquery_type, description, temporal_coverage, covered_by_dictionary, directory_column, measurement_unit, has_sensitive_data, observations in columns_parameters:\n for col in table_config_yaml['columns']:\n if col['name'] == name:\n col['bigquery_type'] = col['bigquery_type'\n ] if bigquery_type == 'NULL' else bigquery_type.lower()\n col['description'] = col['description'\n ] if description == 'NULL' else description\n col['temporal_coverage'] = col['temporal_coverage'\n ] if temporal_coverage == 'NULL' else [\n temporal_coverage]\n col['covered_by_dictionary'] = ('no' if \n covered_by_dictionary == 'NULL' else\n covered_by_dictionary)\n dataset = directory_column.split('.')[0]\n col['directory_column']['dataset_id'] = col[\n 'directory_column']['dataset_id'\n ] if dataset == 'NULL' else dataset\n table = directory_column.split('.')[-1].split(':')[0]\n col['directory_column']['table_id'] = col[\n 'directory_column']['table_id'\n ] if table == 'NULL' else table\n column = directory_column.split('.')[-1].split(':')[-1]\n col['directory_column']['column_name'] = col[\n 'directory_column']['column_name'\n ] if column == 'NULL' else column\n col['measurement_unit'] = col['measurement_unit'\n ] if measurement_unit == 'NULL' else measurement_unit\n col['has_sensitive_data'] = ('no' if has_sensitive_data ==\n 'NULL' else has_sensitive_data)\n col['observations'] = col['observations'\n ] if observations == 'NULL' else observations\n with open(self.table_folder / 'table_config.yaml', 'w', encoding=\n 'utf-8') as f:\n ruamel.dump(table_config_yaml, f)\n self._make_publish_sql()\n\n def init(self, data_sample_path=None, if_folder_exists='raise',\n if_table_config_exists='raise', source_format='csv', force_columns=\n False, columns_config_url_or_path=None):\n \"\"\"Initialize table folder at metadata_path at `metadata_path/<dataset_id>/<table_id>`.\n\n The folder should contain:\n\n * `table_config.yaml`\n * `publish.sql`\n\n You can also point to a sample of the data to auto complete columns names.\n\n Args:\n data_sample_path (str, pathlib.PosixPath): Optional.\n Data sample path to auto complete columns names\n It supports Comma Delimited CSV, Apache Avro and\n Apache Parquet.\n if_folder_exists (str): Optional.\n What to do if table folder exists\n\n * 'raise' : Raises FileExistsError\n * 'replace' : Replace folder\n * 'pass' : Do nothing\n if_table_config_exists (str): Optional\n What to do if table_config.yaml and publish.sql exists\n\n * 'raise' : Raises FileExistsError\n * 'replace' : Replace files with blank template\n * 'pass' : Do nothing\n source_format (str): Optional\n Data source format. Only 'csv', 'avro' and 'parquet'\n are supported. 
Defaults to 'csv'.\n force_columns (bool): Optional.\n If set to `True`, overwrite CKAN's columns with the ones provi\n ded.\n If set to `False`, keep CKAN's columns instead of the ones pro\n vided.\n columns_config_url_or_path (str): Path to the local architeture file or a public google sheets URL.\n Path only suports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.\n Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.\n\n Raises:\n FileExistsError: If folder exists and replace is False.\n NotImplementedError: If data sample is not in supported type or format.\n \"\"\"\n if not self.dataset_folder.exists():\n raise FileExistsError(\n f'Dataset folder {self.dataset_folder} folder does not exists. Create a dataset before adding tables.'\n )\n try:\n self.table_folder.mkdir(exist_ok=if_folder_exists == 'replace')\n except FileExistsError as e:\n if if_folder_exists == 'raise':\n raise FileExistsError(\n f'Table folder already exists for {self.table_id}. '\n ) from e\n if if_folder_exists == 'pass':\n return self\n if not data_sample_path and if_table_config_exists != 'pass':\n raise BaseDosDadosException(\n 'You must provide a path to correctly create config files')\n partition_columns = []\n if isinstance(data_sample_path, (str, Path)):\n data_sample_path = Path(data_sample_path)\n if data_sample_path.is_dir():\n data_sample_path = [f for f in data_sample_path.glob('**/*'\n ) if f.is_file() and f.suffix == f'.{source_format}'][0]\n partition_columns = [k.split('=')[0] for k in\n data_sample_path.as_posix().split('/') if '=' in k]\n columns = Datatype(self, source_format).header(data_sample_path)\n else:\n columns = ['column_name']\n if if_table_config_exists == 'pass':\n if Path(self.table_folder / 'table_config.yaml').is_file(\n ) and Path(self.table_folder / 'publish.sql').is_file():\n pass\n elif not data_sample_path:\n raise BaseDosDadosException(\n 'You must provide a path to correctly create config files')\n else:\n self._make_template(columns, partition_columns,\n if_table_config_exists, force_columns=force_columns)\n elif if_table_config_exists == 'raise':\n if Path(self.table_folder / 'table_config.yaml').is_file(\n ) and Path(self.table_folder / 'publish.sql').is_file():\n raise FileExistsError(\n f'table_config.yaml and publish.sql already exists at {self.table_folder}'\n )\n self._make_template(columns, partition_columns,\n if_table_config_exists, force_columns=force_columns)\n else:\n self._make_template(columns, partition_columns,\n if_table_config_exists, force_columns=force_columns)\n if columns_config_url_or_path is not None:\n self.update_columns(columns_config_url_or_path)\n return self\n <mask token>\n\n def update(self, mode='all'):\n \"\"\"Updates BigQuery schema and description.\n Args:\n mode (str): Optional.\n Table of which table to update [prod|staging|all]\n not_found_ok (bool): Optional.\n What to do if table is not found\n \"\"\"\n self._check_mode(mode)\n mode = ['prod', 'staging'] if mode == 'all' else [mode]\n for m in mode:\n try:\n table = self._get_table_obj(m)\n except google.api_core.exceptions.NotFound:\n continue\n table.description = self._render_template(Path(\n 'table/table_description.txt'), self.table_config)\n with open(self.metadata_path / self.dataset_id / self.table_id /\n 'table_description.txt', 'w', encoding='utf-8') as f:\n f.write(table.description)\n table.schema = self._load_schema(m)\n fields = ['description', 'schema'] if m == 'prod' else [\n 'description']\n 
self.client[f'bigquery_{m}'].update_table(table, fields=fields)\n logger.success(' {object} {object_id} was {action}!', object_id=\n self.table_id, object='Table', action='updated')\n\n def publish(self, if_exists='raise'):\n \"\"\"Creates BigQuery table at production dataset.\n\n Table should be located at `<dataset_id>.<table_id>`.\n\n It creates a view that uses the query from\n `<metadata_path>/<dataset_id>/<table_id>/publish.sql`.\n\n Make sure that all columns from the query also exists at\n `<metadata_path>/<dataset_id>/<table_id>/table_config.sql`, including\n the partitions.\n\n Args:\n if_exists (str): Optional.\n What to do if table exists.\n\n * 'raise' : Raises Conflict exception\n * 'replace' : Replace table\n * 'pass' : Do nothing\n\n Todo:\n\n * Check if all required fields are filled\n \"\"\"\n if if_exists == 'replace':\n self.delete(mode='prod')\n self.client['bigquery_prod'].query((self.table_folder /\n 'publish.sql').open('r', encoding='utf-8').read()).result()\n self.update()\n logger.success(' {object} {object_id} was {action}!', object_id=\n self.table_id, object='Table', action='published')\n\n def delete(self, mode):\n \"\"\"Deletes table in BigQuery.\n\n Args:\n mode (str): Table of which table to delete [prod|staging]\n \"\"\"\n self._check_mode(mode)\n if mode == 'all':\n for m, n in self.table_full_name[mode].items():\n self.client[f'bigquery_{m}'].delete_table(n, not_found_ok=True)\n logger.info(' {object} {object_id}_{mode} was {action}!',\n object_id=self.table_id, mode=mode, object='Table', action=\n 'deleted')\n else:\n self.client[f'bigquery_{mode}'].delete_table(self.\n table_full_name[mode], not_found_ok=True)\n logger.info(' {object} {object_id}_{mode} was {action}!',\n object_id=self.table_id, mode=mode, object='Table', action=\n 'deleted')\n\n def append(self, filepath, partitions=None, if_exists='replace',\n chunk_size=None, **upload_args):\n \"\"\"Appends new data to existing BigQuery table.\n\n As long as the data has the same schema. It appends the data in the\n filepath to the existing table.\n\n Args:\n filepath (str or pathlib.PosixPath): Where to find the file that you want to upload to create a table with\n partitions (str, pathlib.PosixPath, dict): Optional.\n Hive structured partition as a string or dict\n\n * str : `<key>=<value>/<key2>=<value2>`\n * dict: `dict(key=value, key2=value2)`\n if_exists (str): 0ptional.\n What to do if data with same name exists in storage\n\n * 'raise' : Raises Conflict exception\n * 'replace' : Replace table\n * 'pass' : Do nothing\n chunk_size (int): Optional\n The size of a chunk of data whenever iterating (in bytes).\n This must be a multiple of 256 KB per the API specification.\n If not specified, the chunk_size of the blob itself is used. If that is not specified, a default value of 40 MB is used.\n \"\"\"\n if not self.table_exists('staging'):\n raise BaseDosDadosException(\n 'You cannot append to a table that does not exist')\n Storage(self.dataset_id, self.table_id, **self.main_vars).upload(\n filepath, mode='staging', partitions=partitions, if_exists=\n if_exists, chunk_size=chunk_size, **upload_args)\n logger.success(' {object} {object_id} was {action}!', object_id=\n self.table_id, object='Table', action='appended')\n",
"step-5": "\"\"\"\nClass for manage tables in Storage and Big Query\n\"\"\"\n# pylint: disable=invalid-name, too-many-locals, too-many-branches, too-many-arguments,line-too-long,R0801,consider-using-f-string\nfrom pathlib import Path\nimport json\nfrom copy import deepcopy\nimport textwrap\nimport inspect\nfrom io import StringIO\n\nfrom loguru import logger\nfrom google.cloud import bigquery\nimport ruamel.yaml as ryaml\nimport requests\nimport pandas as pd\nimport google.api_core.exceptions\n\nfrom basedosdados.upload.base import Base\nfrom basedosdados.upload.storage import Storage\nfrom basedosdados.upload.dataset import Dataset\nfrom basedosdados.upload.datatypes import Datatype\nfrom basedosdados.upload.metadata import Metadata\nfrom basedosdados.exceptions import BaseDosDadosException\n\n\nclass Table(Base):\n \"\"\"\n Manage tables in Google Cloud Storage and BigQuery.\n \"\"\"\n\n def __init__(self, dataset_id, table_id, **kwargs):\n super().__init__(**kwargs)\n\n self.table_id = table_id.replace(\"-\", \"_\")\n self.dataset_id = dataset_id.replace(\"-\", \"_\")\n self.dataset_folder = Path(self.metadata_path / self.dataset_id)\n self.table_folder = self.dataset_folder / table_id\n self.table_full_name = dict(\n prod=f\"{self.client['bigquery_prod'].project}.{self.dataset_id}.{self.table_id}\",\n staging=f\"{self.client['bigquery_staging'].project}.{self.dataset_id}_staging.{self.table_id}\",\n )\n self.table_full_name.update(dict(all=deepcopy(self.table_full_name)))\n self.metadata = Metadata(self.dataset_id, self.table_id, **kwargs)\n\n @property\n def table_config(self):\n \"\"\"\n Load table_config.yaml\n \"\"\"\n return self._load_yaml(self.table_folder / \"table_config.yaml\")\n\n def _get_table_obj(self, mode):\n \"\"\"\n Get table object from BigQuery\n \"\"\"\n return self.client[f\"bigquery_{mode}\"].get_table(self.table_full_name[mode])\n\n def _is_partitioned(self):\n \"\"\"\n Check if table is partitioned\n \"\"\"\n ## check if the table are partitioned, need the split because of a change in the type of partitions in pydantic\n partitions = self.table_config[\"partitions\"]\n if partitions is None or len(partitions) == 0:\n return False\n\n if isinstance(partitions, list):\n # check if any None inside list.\n # False if it is the case Ex: [None, 'partition']\n # True otherwise Ex: ['partition1', 'partition2']\n return all(item is not None for item in partitions)\n\n raise ValueError(\"Partitions must be a list or None\")\n\n def _load_schema(self, mode=\"staging\"):\n \"\"\"Load schema from table_config.yaml\n\n Args:\n mode (bool): Which dataset to create [prod|staging].\n \"\"\"\n\n self._check_mode(mode)\n\n json_path = self.table_folder / f\"schema-{mode}.json\"\n columns = self.table_config[\"columns\"]\n\n if mode == \"staging\":\n new_columns = []\n for c in columns:\n # case is_in_staging are None then must be True\n is_in_staging = (\n True if c.get(\"is_in_staging\") is None else c[\"is_in_staging\"]\n )\n # append columns declared in table_config.yaml to schema only if is_in_staging: True\n if is_in_staging and not c.get(\"is_partition\"):\n c[\"type\"] = \"STRING\"\n new_columns.append(c)\n\n del columns\n columns = new_columns\n\n elif mode == \"prod\":\n schema = self._get_table_obj(mode).schema\n\n # get field names for fields at schema and at table_config.yaml\n column_names = [c[\"name\"] for c in columns]\n schema_names = [s.name for s in schema]\n\n # check if there are mismatched fields\n not_in_columns = [name for name in schema_names if name not 
in column_names]\n not_in_schema = [name for name in column_names if name not in schema_names]\n\n # raise if field is not in table_config\n if not_in_columns:\n raise BaseDosDadosException(\n \"Column {error_columns} was not found in table_config.yaml. Are you sure that \"\n \"all your column names between table_config.yaml, publish.sql and \"\n \"{project_id}.{dataset_id}.{table_id} are the same?\".format(\n error_columns=not_in_columns,\n project_id=self.table_config[\"project_id_prod\"],\n dataset_id=self.table_config[\"dataset_id\"],\n table_id=self.table_config[\"table_id\"],\n )\n )\n\n # raise if field is not in schema\n if not_in_schema:\n raise BaseDosDadosException(\n \"Column {error_columns} was not found in publish.sql. Are you sure that \"\n \"all your column names between table_config.yaml, publish.sql and \"\n \"{project_id}.{dataset_id}.{table_id} are the same?\".format(\n error_columns=not_in_schema,\n project_id=self.table_config[\"project_id_prod\"],\n dataset_id=self.table_config[\"dataset_id\"],\n table_id=self.table_config[\"table_id\"],\n )\n )\n\n # if field is in schema, get field_type and field_mode\n for c in columns:\n for s in schema:\n if c[\"name\"] == s.name:\n c[\"type\"] = s.field_type\n c[\"mode\"] = s.mode\n break\n ## force utf-8, write schema_{mode}.json\n json.dump(columns, (json_path).open(\"w\", encoding=\"utf-8\"))\n\n # load new created schema\n return self.client[f\"bigquery_{mode}\"].schema_from_json(str(json_path))\n\n def _make_publish_sql(self):\n \"\"\"Create publish.sql with columns and bigquery_type\"\"\"\n\n ### publish.sql header and instructions\n publish_txt = \"\"\"\n /*\n Query para publicar a tabela.\n\n Esse é o lugar para:\n - modificar nomes, ordem e tipos de colunas\n - dar join com outras tabelas\n - criar colunas extras (e.g. 
logs, proporções, etc.)\n\n Qualquer coluna definida aqui deve também existir em `table_config.yaml`.\n\n # Além disso, sinta-se à vontade para alterar alguns nomes obscuros\n # para algo um pouco mais explícito.\n\n TIPOS:\n - Para modificar tipos de colunas, basta substituir STRING por outro tipo válido.\n - Exemplo: `SAFE_CAST(column_name AS NUMERIC) column_name`\n - Mais detalhes: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types\n */\n \"\"\"\n\n # remove triple quotes extra space\n publish_txt = inspect.cleandoc(publish_txt)\n publish_txt = textwrap.dedent(publish_txt)\n\n # add create table statement\n project_id_prod = self.client[\"bigquery_prod\"].project\n publish_txt += f\"\\n\\nCREATE VIEW {project_id_prod}.{self.dataset_id}.{self.table_id} AS\\nSELECT \\n\"\n\n # sort columns by is_partition, partitions_columns come first\n\n if self._is_partitioned():\n columns = sorted(\n self.table_config[\"columns\"],\n key=lambda k: (k[\"is_partition\"] is not None, k[\"is_partition\"]),\n reverse=True,\n )\n else:\n columns = self.table_config[\"columns\"]\n\n # add columns in publish.sql\n for col in columns:\n name = col[\"name\"]\n bigquery_type = (\n \"STRING\"\n if col[\"bigquery_type\"] is None\n else col[\"bigquery_type\"].upper()\n )\n\n publish_txt += f\"SAFE_CAST({name} AS {bigquery_type}) {name},\\n\"\n ## remove last comma\n publish_txt = publish_txt[:-2] + \"\\n\"\n\n # add from statement\n project_id_staging = self.client[\"bigquery_staging\"].project\n publish_txt += (\n f\"FROM {project_id_staging}.{self.dataset_id}_staging.{self.table_id} AS t\"\n )\n\n # save publish.sql in table_folder\n (self.table_folder / \"publish.sql\").open(\"w\", encoding=\"utf-8\").write(\n publish_txt\n )\n\n def _make_template(self, columns, partition_columns, if_table_config_exists, force_columns):\n # create table_config.yaml with metadata\n self.metadata.create(\n if_exists=if_table_config_exists,\n columns=partition_columns + columns,\n partition_columns=partition_columns,\n force_columns=force_columns,\n table_only=False,\n )\n\n self._make_publish_sql()\n\n @staticmethod\n def _sheet_to_df(columns_config_url_or_path):\n \"\"\"\n Convert sheet to dataframe\n \"\"\"\n url = columns_config_url_or_path.replace(\"edit#gid=\", \"export?format=csv&gid=\")\n try:\n return pd.read_csv(StringIO(requests.get(url, timeout=10).content.decode(\"utf-8\")))\n except Exception as e:\n raise BaseDosDadosException(\n \"Check if your google sheet Share are: Anyone on the internet with this link can view\"\n ) from e\n\n def table_exists(self, mode):\n \"\"\"Check if table exists in BigQuery.\n\n Args:\n mode (str): Which dataset to check [prod|staging].\n \"\"\"\n\n try:\n ref = self._get_table_obj(mode=mode)\n except google.api_core.exceptions.NotFound:\n ref = None\n\n return bool(ref)\n\n def update_columns(self, columns_config_url_or_path=None):\n \"\"\"\n Fills columns in table_config.yaml automatically using a public google sheets URL or a local file. 
Also regenerate\n publish.sql and autofill type using bigquery_type.\n\n The sheet must contain the columns:\n - name: column name\n - description: column description\n - bigquery_type: column bigquery type\n - measurement_unit: column mesurement unit\n - covered_by_dictionary: column related dictionary\n - directory_column: column related directory in the format <dataset_id>.<table_id>:<column_name>\n - temporal_coverage: column temporal coverage\n - has_sensitive_data: the column has sensitive data\n - observations: column observations\n Args:\n columns_config_url_or_path (str): Path to the local architeture file or a public google sheets URL.\n Path only suports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.\n Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.\n\n \"\"\"\n ruamel = ryaml.YAML()\n ruamel.preserve_quotes = True\n ruamel.indent(mapping=4, sequence=6, offset=4)\n table_config_yaml = ruamel.load(\n (self.table_folder / \"table_config.yaml\").open(encoding=\"utf-8\")\n )\n\n if \"https://docs.google.com/spreadsheets/d/\" in columns_config_url_or_path:\n if (\n \"edit#gid=\" not in columns_config_url_or_path\n or \"https://docs.google.com/spreadsheets/d/\"\n not in columns_config_url_or_path\n or not columns_config_url_or_path.split(\"=\")[1].isdigit()\n ):\n raise BaseDosDadosException(\n \"The Google sheet url not in correct format.\"\n \"The url must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>\"\n )\n df = self._sheet_to_df(columns_config_url_or_path)\n else:\n file_type = columns_config_url_or_path.split(\".\")[-1]\n if file_type == \"csv\":\n df = pd.read_csv(columns_config_url_or_path, encoding=\"utf-8\")\n elif file_type in [\"xls\", \"xlsx\", \"xlsm\", \"xlsb\", \"odf\", \"ods\", \"odt\"]:\n df = pd.read_excel(columns_config_url_or_path)\n else:\n raise BaseDosDadosException(\n \"File not suported. 
Only csv, xls, xlsx, xlsm, xlsb, odf, ods, odt are supported.\"\n )\n\n df = df.fillna(\"NULL\")\n\n required_columns = [\n \"name\",\n \"bigquery_type\",\n \"description\",\n \"temporal_coverage\",\n \"covered_by_dictionary\",\n \"directory_column\",\n \"measurement_unit\",\n \"has_sensitive_data\",\n \"observations\",\n ]\n\n not_found_columns = required_columns.copy()\n for sheet_column in df.columns.tolist():\n for required_column in required_columns:\n if sheet_column == required_column:\n not_found_columns.remove(required_column)\n if not_found_columns:\n raise BaseDosDadosException(\n f\"The following required columns are not found: {', '.join(not_found_columns)}.\"\n )\n\n columns_parameters = zip(\n *[df[required_column].tolist() for required_column in required_columns]\n )\n for (\n name,\n bigquery_type,\n description,\n temporal_coverage,\n covered_by_dictionary,\n directory_column,\n measurement_unit,\n has_sensitive_data,\n observations,\n ) in columns_parameters:\n for col in table_config_yaml[\"columns\"]:\n if col[\"name\"] == name:\n col[\"bigquery_type\"] = (\n col[\"bigquery_type\"]\n if bigquery_type == \"NULL\"\n else bigquery_type.lower()\n )\n\n col[\"description\"] = (\n col[\"description\"] if description == \"NULL\" else description\n )\n\n col[\"temporal_coverage\"] = (\n col[\"temporal_coverage\"]\n if temporal_coverage == \"NULL\"\n else [temporal_coverage]\n )\n\n col[\"covered_by_dictionary\"] = (\n \"no\"\n if covered_by_dictionary == \"NULL\"\n else covered_by_dictionary\n )\n\n dataset = directory_column.split(\".\")[0]\n col[\"directory_column\"][\"dataset_id\"] = (\n col[\"directory_column\"][\"dataset_id\"]\n if dataset == \"NULL\"\n else dataset\n )\n\n table = directory_column.split(\".\")[-1].split(\":\")[0]\n col[\"directory_column\"][\"table_id\"] = (\n col[\"directory_column\"][\"table_id\"]\n if table == \"NULL\"\n else table\n )\n\n column = directory_column.split(\".\")[-1].split(\":\")[-1]\n col[\"directory_column\"][\"column_name\"] = (\n col[\"directory_column\"][\"column_name\"]\n if column == \"NULL\"\n else column\n )\n col[\"measurement_unit\"] = (\n col[\"measurement_unit\"]\n if measurement_unit == \"NULL\"\n else measurement_unit\n )\n\n col[\"has_sensitive_data\"] = (\n \"no\" if has_sensitive_data == \"NULL\" else has_sensitive_data\n )\n\n col[\"observations\"] = (\n col[\"observations\"] if observations == \"NULL\" else observations\n )\n\n with open(self.table_folder / \"table_config.yaml\", \"w\", encoding=\"utf-8\") as f:\n ruamel.dump(table_config_yaml, f)\n\n # regenerate publish.sql\n self._make_publish_sql()\n\n def init(\n self,\n data_sample_path=None,\n if_folder_exists=\"raise\",\n if_table_config_exists=\"raise\",\n source_format=\"csv\",\n force_columns = False,\n columns_config_url_or_path=None,\n ): # sourcery skip: low-code-quality\n \"\"\"Initialize table folder at metadata_path at `metadata_path/<dataset_id>/<table_id>`.\n\n The folder should contain:\n\n * `table_config.yaml`\n * `publish.sql`\n\n You can also point to a sample of the data to auto complete columns names.\n\n Args:\n data_sample_path (str, pathlib.PosixPath): Optional.\n Data sample path to auto complete columns names\n It supports Comma Delimited CSV, Apache Avro and\n Apache Parquet.\n if_folder_exists (str): Optional.\n What to do if table folder exists\n\n * 'raise' : Raises FileExistsError\n * 'replace' : Replace folder\n * 'pass' : Do nothing\n if_table_config_exists (str): Optional\n What to do if table_config.yaml and publish.sql 
exists\n\n * 'raise' : Raises FileExistsError\n * 'replace' : Replace files with blank template\n * 'pass' : Do nothing\n source_format (str): Optional\n Data source format. Only 'csv', 'avro' and 'parquet'\n are supported. Defaults to 'csv'.\n force_columns (bool): Optional.\n If set to `True`, overwrite CKAN's columns with the ones provi\n ded.\n If set to `False`, keep CKAN's columns instead of the ones pro\n vided.\n columns_config_url_or_path (str): Path to the local architeture file or a public google sheets URL.\n Path only suports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.\n Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.\n\n Raises:\n FileExistsError: If folder exists and replace is False.\n NotImplementedError: If data sample is not in supported type or format.\n \"\"\"\n if not self.dataset_folder.exists():\n\n raise FileExistsError(\n f\"Dataset folder {self.dataset_folder} folder does not exists. \"\n \"Create a dataset before adding tables.\"\n )\n\n try:\n self.table_folder.mkdir(exist_ok=(if_folder_exists == \"replace\"))\n except FileExistsError as e:\n if if_folder_exists == \"raise\":\n raise FileExistsError(\n f\"Table folder already exists for {self.table_id}. \"\n ) from e\n if if_folder_exists == \"pass\":\n return self\n\n if not data_sample_path and if_table_config_exists != \"pass\":\n raise BaseDosDadosException(\n \"You must provide a path to correctly create config files\"\n )\n\n partition_columns = []\n if isinstance(\n data_sample_path,\n (\n str,\n Path,\n ),\n ):\n # Check if partitioned and get data sample and partition columns\n data_sample_path = Path(data_sample_path)\n\n if data_sample_path.is_dir():\n\n data_sample_path = [\n f\n for f in data_sample_path.glob(\"**/*\")\n if f.is_file() and f.suffix == f\".{source_format}\"\n ][0]\n\n partition_columns = [\n k.split(\"=\")[0]\n for k in data_sample_path.as_posix().split(\"/\")\n if \"=\" in k\n ]\n\n columns = Datatype(self, source_format).header(data_sample_path)\n\n else:\n\n columns = [\"column_name\"]\n\n if if_table_config_exists == \"pass\":\n # Check if config files exists before passing\n if (\n Path(self.table_folder / \"table_config.yaml\").is_file()\n and Path(self.table_folder / \"publish.sql\").is_file()\n ):\n pass\n # Raise if no sample to determine columns\n elif not data_sample_path:\n raise BaseDosDadosException(\n \"You must provide a path to correctly create config files\"\n )\n else:\n self._make_template(columns, partition_columns, if_table_config_exists, force_columns=force_columns)\n\n elif if_table_config_exists == \"raise\":\n\n # Check if config files already exist\n if (\n Path(self.table_folder / \"table_config.yaml\").is_file()\n and Path(self.table_folder / \"publish.sql\").is_file()\n ):\n\n raise FileExistsError(\n f\"table_config.yaml and publish.sql already exists at {self.table_folder}\"\n )\n # if config files don't exist, create them\n self._make_template(columns, partition_columns, if_table_config_exists, force_columns=force_columns)\n\n else:\n # Raise: without a path to data sample, should not replace config files with empty template\n self._make_template(columns, partition_columns, if_table_config_exists, force_columns=force_columns)\n\n if columns_config_url_or_path is not None:\n self.update_columns(columns_config_url_or_path)\n\n return self\n\n def create(\n self,\n path=None,\n force_dataset=True,\n if_table_exists=\"raise\",\n if_storage_data_exists=\"raise\",\n 
if_table_config_exists=\"raise\",\n source_format=\"csv\",\n force_columns=False,\n columns_config_url_or_path=None,\n dataset_is_public=True,\n location=None,\n chunk_size=None,\n ):\n \"\"\"Creates BigQuery table at staging dataset.\n\n If you add a path, it automatically saves the data in the storage,\n creates a datasets folder and BigQuery location, besides creating the\n table and its configuration files.\n\n The new table should be located at `<dataset_id>_staging.<table_id>` in BigQuery.\n\n It looks for data saved in Storage at `<bucket_name>/staging/<dataset_id>/<table_id>/*`\n and builds the table.\n\n It currently supports the types:\n\n - Comma Delimited CSV\n - Apache Avro\n - Apache Parquet\n\n Data can also be partitioned following the hive partitioning scheme\n `<key1>=<value1>/<key2>=<value2>` - for instance,\n `year=2012/country=BR`. The partition is automatcally detected\n by searching for `partitions` on the `table_config.yaml`.\n\n Args:\n path (str or pathlib.PosixPath): Where to find the file that you want to upload to create a table with\n job_config_params (dict): Optional.\n Job configuration params from bigquery\n if_table_exists (str): Optional\n What to do if table exists\n\n * 'raise' : Raises Conflict exception\n * 'replace' : Replace table\n * 'pass' : Do nothing\n force_dataset (bool): Creates `<dataset_id>` folder and BigQuery Dataset if it doesn't exists.\n if_table_config_exists (str): Optional.\n What to do if config files already exist\n\n * 'raise': Raises FileExistError\n * 'replace': Replace with blank template\n * 'pass'; Do nothing\n if_storage_data_exists (str): Optional.\n What to do if data already exists on your bucket:\n\n * 'raise' : Raises Conflict exception\n * 'replace' : Replace table\n * 'pass' : Do nothing\n source_format (str): Optional\n Data source format. Only 'csv', 'avro' and 'parquet'\n are supported. Defaults to 'csv'.\n force_columns (bool): Optional.\n If set to `True`, overwrite CKAN's columns with the ones provi\n ded.\n If set to `False`, keep CKAN's columns instead of the ones pro\n vided.\n columns_config_url_or_path (str): Path to the local architeture file or a public google sheets URL.\n Path only suports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.\n Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.\n\n dataset_is_public (bool): Control if prod dataset is public or not. By default staging datasets like `dataset_id_staging` are not public.\n\n location (str): Optional. Location of dataset data.\n List of possible region names locations: https://cloud.google.com/bigquery/docs/locations\n\n chunk_size (int): Optional\n The size of a chunk of data whenever iterating (in bytes).\n This must be a multiple of 256 KB per the API specification.\n If not specified, the chunk_size of the blob itself is used. 
If that is not specified, a default value of 40 MB is used.\n \"\"\"\n\n if path is None:\n\n # Look if table data already exists at Storage\n data = self.client[\"storage_staging\"].list_blobs(\n self.bucket_name, prefix=f\"staging/{self.dataset_id}/{self.table_id}\"\n )\n\n # Raise: Cannot create table without external data\n if not data:\n raise BaseDosDadosException(\n \"You must provide a path for uploading data\"\n )\n\n # Add data to storage\n if isinstance(\n path,\n (\n str,\n Path,\n ),\n ):\n\n Storage(self.dataset_id, self.table_id, **self.main_vars).upload(\n path,\n mode=\"staging\",\n if_exists=if_storage_data_exists,\n chunk_size=chunk_size,\n )\n\n # Create Dataset if it doesn't exist\n if force_dataset:\n\n dataset_obj = Dataset(self.dataset_id, **self.main_vars)\n\n try:\n dataset_obj.init()\n except FileExistsError:\n pass\n\n dataset_obj.create(\n if_exists=\"pass\", location=location, dataset_is_public=dataset_is_public\n )\n\n self.init(\n data_sample_path=path,\n if_folder_exists=\"replace\",\n if_table_config_exists=if_table_config_exists,\n columns_config_url_or_path=columns_config_url_or_path,\n source_format=source_format,\n force_columns=force_columns\n )\n\n table = bigquery.Table(self.table_full_name[\"staging\"])\n table.external_data_configuration = Datatype(\n self, source_format, \"staging\", partitioned=self._is_partitioned()\n ).external_config\n\n # Lookup if table alreay exists\n table_ref = None\n try:\n table_ref = self.client[\"bigquery_staging\"].get_table(\n self.table_full_name[\"staging\"]\n )\n\n except google.api_core.exceptions.NotFound:\n pass\n\n if isinstance(table_ref, google.cloud.bigquery.table.Table):\n\n if if_table_exists == \"pass\":\n\n return None\n\n if if_table_exists == \"raise\":\n\n raise FileExistsError(\n \"Table already exists, choose replace if you want to overwrite it\"\n )\n\n if if_table_exists == \"replace\":\n\n self.delete(mode=\"staging\")\n\n self.client[\"bigquery_staging\"].create_table(table)\n\n logger.success(\n \"{object} {object_id} was {action}!\",\n object_id=self.table_id,\n object=\"Table\",\n action=\"created\",\n )\n return None\n\n def update(self, mode=\"all\"):\n \"\"\"Updates BigQuery schema and description.\n Args:\n mode (str): Optional.\n Table of which table to update [prod|staging|all]\n not_found_ok (bool): Optional.\n What to do if table is not found\n \"\"\"\n\n self._check_mode(mode)\n\n mode = [\"prod\", \"staging\"] if mode == \"all\" else [mode]\n for m in mode:\n\n try:\n table = self._get_table_obj(m)\n except google.api_core.exceptions.NotFound:\n continue\n\n # if m == \"staging\":\n\n table.description = self._render_template(\n Path(\"table/table_description.txt\"), self.table_config\n )\n\n # save table description\n with open(\n self.metadata_path\n / self.dataset_id\n / self.table_id\n / \"table_description.txt\",\n \"w\",\n encoding=\"utf-8\",\n ) as f:\n f.write(table.description)\n\n # when mode is staging the table schema already exists\n table.schema = self._load_schema(m)\n fields = [\"description\", \"schema\"] if m == \"prod\" else [\"description\"]\n self.client[f\"bigquery_{m}\"].update_table(table, fields=fields)\n\n logger.success(\n \" {object} {object_id} was {action}!\",\n object_id=self.table_id,\n object=\"Table\",\n action=\"updated\",\n )\n\n def publish(self, if_exists=\"raise\"):\n \"\"\"Creates BigQuery table at production dataset.\n\n Table should be located at `<dataset_id>.<table_id>`.\n\n It creates a view that uses the query from\n 
`<metadata_path>/<dataset_id>/<table_id>/publish.sql`.\n\n Make sure that all columns from the query also exists at\n `<metadata_path>/<dataset_id>/<table_id>/table_config.sql`, including\n the partitions.\n\n Args:\n if_exists (str): Optional.\n What to do if table exists.\n\n * 'raise' : Raises Conflict exception\n * 'replace' : Replace table\n * 'pass' : Do nothing\n\n Todo:\n\n * Check if all required fields are filled\n \"\"\"\n\n if if_exists == \"replace\":\n self.delete(mode=\"prod\")\n\n self.client[\"bigquery_prod\"].query(\n (self.table_folder / \"publish.sql\").open(\"r\", encoding=\"utf-8\").read()\n ).result()\n\n self.update()\n logger.success(\n \" {object} {object_id} was {action}!\",\n object_id=self.table_id,\n object=\"Table\",\n action=\"published\",\n )\n\n def delete(self, mode):\n \"\"\"Deletes table in BigQuery.\n\n Args:\n mode (str): Table of which table to delete [prod|staging]\n \"\"\"\n\n self._check_mode(mode)\n\n if mode == \"all\":\n for m, n in self.table_full_name[mode].items():\n self.client[f\"bigquery_{m}\"].delete_table(n, not_found_ok=True)\n logger.info(\n \" {object} {object_id}_{mode} was {action}!\",\n object_id=self.table_id,\n mode=mode,\n object=\"Table\",\n action=\"deleted\",\n )\n else:\n self.client[f\"bigquery_{mode}\"].delete_table(\n self.table_full_name[mode], not_found_ok=True\n )\n\n logger.info(\n \" {object} {object_id}_{mode} was {action}!\",\n object_id=self.table_id,\n mode=mode,\n object=\"Table\",\n action=\"deleted\",\n )\n\n def append(\n self,\n filepath,\n partitions=None,\n if_exists=\"replace\",\n chunk_size=None,\n **upload_args,\n ):\n \"\"\"Appends new data to existing BigQuery table.\n\n As long as the data has the same schema. It appends the data in the\n filepath to the existing table.\n\n Args:\n filepath (str or pathlib.PosixPath): Where to find the file that you want to upload to create a table with\n partitions (str, pathlib.PosixPath, dict): Optional.\n Hive structured partition as a string or dict\n\n * str : `<key>=<value>/<key2>=<value2>`\n * dict: `dict(key=value, key2=value2)`\n if_exists (str): 0ptional.\n What to do if data with same name exists in storage\n\n * 'raise' : Raises Conflict exception\n * 'replace' : Replace table\n * 'pass' : Do nothing\n chunk_size (int): Optional\n The size of a chunk of data whenever iterating (in bytes).\n This must be a multiple of 256 KB per the API specification.\n If not specified, the chunk_size of the blob itself is used. If that is not specified, a default value of 40 MB is used.\n \"\"\"\n if not self.table_exists(\"staging\"):\n raise BaseDosDadosException(\n \"You cannot append to a table that does not exist\"\n )\n Storage(self.dataset_id, self.table_id, **self.main_vars).upload(\n filepath,\n mode=\"staging\",\n partitions=partitions,\n if_exists=if_exists,\n chunk_size=chunk_size,\n **upload_args,\n )\n logger.success(\n \" {object} {object_id} was {action}!\",\n object_id=self.table_id,\n object=\"Table\",\n action=\"appended\",\n )\n",
"step-ids": [
8,
12,
15,
16,
20
]
}
|
[
8,
12,
15,
16,
20
] |
def flat_list(array):
result = []
for element in array:
if type(element) == list:
result += flat_list(element)
else:
result.append(element)
return result
print flat_list([1, [2, 2, 2], 4])
print flat_list([-1, [1, [-2], 1], -1])
|
normal
|
{
"blob_id": "0d321193d68b463e3dd04b21ee611afdc212a22b",
"index": 4682,
"step-1": "def flat_list(array):\n result = []\n for element in array:\n if type(element) == list:\n result += flat_list(element)\n else:\n result.append(element)\n return result\n\n\nprint flat_list([1, [2, 2, 2], 4])\nprint flat_list([-1, [1, [-2], 1], -1])",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import sys, serial, time, signal, threading
from MFRC522 import MFRC522
from event import Event
class Sensor(threading.Thread):
# main program for reading and processing tags
def __init__(self, name):
threading.Thread.__init__(self)
self.name = name
self.continue_reading = False
self.tag_reader = MFRC522()
self.signal = signal.signal(signal.SIGINT, self.end_read)
self.last_tag = ''
#EVENTS
self.FOUND_TAG = Event()
def end_read(self, signal,frame):
print "Ctrl+C captured, ending read."
self.stop()
def stop(self):
self.continue_reading = False
def run(self):
print "sensor running"
self.continue_reading = True
#if RFID is working - start monitoring it
while self.continue_reading:
(status,TagType) = self.tag_reader.MFRC522_Request(self.tag_reader.PICC_REQIDL)
if status == self.tag_reader.MI_OK:
print "Card detected"
(status,backData) = self.tag_reader.MFRC522_Anticoll()
if status == self.tag_reader.MI_OK:
rfid_tag = "".join(str(val) for val in backData)
print 'TAG : %s' % rfid_tag
self.last_tag = rfid_tag
self.FOUND_TAG(self)
time.sleep(.1)
print 'not reading sensor'
# def start(self):
# print "sensor running"
# self.continue_reading = True
# #if RFID is working - start monitoring it
# while self.continue_reading:
# (status,TagType) = self.tag_reader.MFRC522_Request(self.tag_reader.PICC_REQIDL)
# if status == self.tag_reader.MI_OK:
# print "Card detected"
# (status,backData) = self.tag_reader.MFRC522_Anticoll()
# if status == self.tag_reader.MI_OK:
# rfid_tag = "".join(str(val) for val in backData)
# print 'TAG : %s' % rfid_tag
# self.last_tag = rfid_tag
# self.FOUND_TAG(self)
# time.sleep(.1)
# print 'not reading sensor'
|
normal
|
{
"blob_id": "c179d27f1620414061d376d4f30d2ddd4fd2750e",
"index": 3842,
"step-1": "import sys, serial, time, signal, threading\nfrom MFRC522 import MFRC522\nfrom event import Event\n\nclass Sensor(threading.Thread):\n\n\t# main program for reading and processing tags\n\tdef __init__(self, name):\n\t\tthreading.Thread.__init__(self)\n\t\tself.name = name\n\t\tself.continue_reading = False\n\t\tself.tag_reader = MFRC522()\n\t\tself.signal = signal.signal(signal.SIGINT, self.end_read)\n\n\t\tself.last_tag = ''\n\n\t\t#EVENTS\n\t\tself.FOUND_TAG = Event()\n\n\tdef end_read(self, signal,frame):\n\t\tprint \"Ctrl+C captured, ending read.\"\n\t\tself.stop()\n\n\tdef stop(self):\n\t\tself.continue_reading = False\n\n\tdef run(self):\n\t\tprint \"sensor running\"\n\t\tself.continue_reading = True\n\t\t#if RFID is working - start monitoring it\n\t\twhile self.continue_reading:\n\t\t\t(status,TagType) = self.tag_reader.MFRC522_Request(self.tag_reader.PICC_REQIDL)\n\t\t \n\t\t\tif status == self.tag_reader.MI_OK:\n\t\t\t\tprint \"Card detected\"\n\t\t \n\t\t\t(status,backData) = self.tag_reader.MFRC522_Anticoll()\n\t\t\tif status == self.tag_reader.MI_OK:\n\t\t\t\trfid_tag = \"\".join(str(val) for val in backData)\n\t\t\t\tprint 'TAG : %s' % rfid_tag\n\t\t\t\tself.last_tag = rfid_tag\n\t\t\t\tself.FOUND_TAG(self) \n\n\t\t\ttime.sleep(.1)\n\t\tprint 'not reading sensor' \n\n\t# def start(self):\n\t# \tprint \"sensor running\"\n\t# \tself.continue_reading = True\n\t# \t#if RFID is working - start monitoring it\n\t# \twhile self.continue_reading:\n\t# \t\t(status,TagType) = self.tag_reader.MFRC522_Request(self.tag_reader.PICC_REQIDL)\n\t\t \n\t# \t\tif status == self.tag_reader.MI_OK:\n\t# \t\t\tprint \"Card detected\"\n\t\t \n\t# \t\t(status,backData) = self.tag_reader.MFRC522_Anticoll()\n\t# \t\tif status == self.tag_reader.MI_OK:\n\t# \t\t\trfid_tag = \"\".join(str(val) for val in backData)\n\t# \t\t\tprint 'TAG : %s' % rfid_tag\n\t# \t\t\tself.last_tag = rfid_tag\n\t# \t\t\tself.FOUND_TAG(self) \n\n\t# \t\ttime.sleep(.1)\n\t# \tprint 'not reading sensor' \n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from utils.gradient_strategy.dct_generator import DCTGenerator
from utils.gradient_strategy.random_generator import RandomGenerator
from utils.gradient_strategy.upsample_generator import UpSampleGenerator
from utils.gradient_strategy.centerconv_generator import CenterConvGenerator
from utils.attack_setting import *
from utils.construct_model_data import construct_model_and_data
from utils.generate_model import ImageModel
from utils.generate_video import video
from utils.load_data import ImageData, split_data
from utils.show_or_save import *
from utils.gradient_strategy.centerconv_generator import CenterConvGenerator
|
normal
|
{
"blob_id": "399097ef7cfdc061b307c3cc29615c9f50b1e6bf",
"index": 5511,
"step-1": "<mask token>\n",
"step-2": "from utils.gradient_strategy.dct_generator import DCTGenerator\nfrom utils.gradient_strategy.random_generator import RandomGenerator\nfrom utils.gradient_strategy.upsample_generator import UpSampleGenerator\nfrom utils.gradient_strategy.centerconv_generator import CenterConvGenerator\nfrom utils.attack_setting import *\nfrom utils.construct_model_data import construct_model_and_data\nfrom utils.generate_model import ImageModel\nfrom utils.generate_video import video\nfrom utils.load_data import ImageData, split_data\nfrom utils.show_or_save import *\nfrom utils.gradient_strategy.centerconv_generator import CenterConvGenerator\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if horast >= 41:
print('Valor a Pagar: ', resp3)
elif horast <= 40:
print('Valor a Pagar: ', resp4)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
horast = int(input('Horas Trabajadas: ' + '\n\t\t'))
tarifa = int(input('Tarifa por hora: ' + '\n\t\t'))
descu = int(input('Descuentos: ' + '\n\t\t'))
resp0 = horast - descu
resp1 = resp0 * tarifa / 2
resp2 = horast * tarifa + resp1
resp3 = resp2 - descu
resp4 = horast * tarifa
if horast >= 41:
print('Valor a Pagar: ', resp3)
elif horast <= 40:
print('Valor a Pagar: ', resp4)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 25 15:14:15 2020
@author: luisa
"""
horast = int(input("Horas Trabajadas: "+"\n\t\t"))
tarifa = int(input("Tarifa por hora: "+"\n\t\t"))
descu = int(input("Descuentos: "+"\n\t\t"))
resp0 = horast - descu
resp1 = (resp0 * tarifa)/2
resp2 = (horast * tarifa) + resp1
resp3 = resp2 - descu
resp4 = horast * tarifa
if horast >= 41:
print("Valor a Pagar: ", resp3)
elif horast <= 40:
print("Valor a Pagar: ", resp4)
|
flexible
|
{
"blob_id": "4d9575c178b672815bb561116689b9b0721cb5ba",
"index": 919,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif horast >= 41:\n print('Valor a Pagar: ', resp3)\nelif horast <= 40:\n print('Valor a Pagar: ', resp4)\n",
"step-3": "<mask token>\nhorast = int(input('Horas Trabajadas: ' + '\\n\\t\\t'))\ntarifa = int(input('Tarifa por hora: ' + '\\n\\t\\t'))\ndescu = int(input('Descuentos: ' + '\\n\\t\\t'))\nresp0 = horast - descu\nresp1 = resp0 * tarifa / 2\nresp2 = horast * tarifa + resp1\nresp3 = resp2 - descu\nresp4 = horast * tarifa\nif horast >= 41:\n print('Valor a Pagar: ', resp3)\nelif horast <= 40:\n print('Valor a Pagar: ', resp4)\n",
"step-4": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Thu Jun 25 15:14:15 2020\r\n\r\n@author: luisa\r\n\"\"\"\r\n\r\n\r\nhorast = int(input(\"Horas Trabajadas: \"+\"\\n\\t\\t\"))\r\ntarifa = int(input(\"Tarifa por hora: \"+\"\\n\\t\\t\"))\r\ndescu = int(input(\"Descuentos: \"+\"\\n\\t\\t\"))\r\nresp0 = horast - descu\r\nresp1 = (resp0 * tarifa)/2\r\nresp2 = (horast * tarifa) + resp1\r\nresp3 = resp2 - descu\r\nresp4 = horast * tarifa\r\nif horast >= 41:\r\n print(\"Valor a Pagar: \", resp3)\r\nelif horast <= 40:\r\n print(\"Valor a Pagar: \", resp4)\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(sys.modules)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import sys
print(sys.modules)
<|reserved_special_token_1|>
"""to get the all the module and its location"""
import sys
print(sys.modules)
|
flexible
|
{
"blob_id": "20637e41df8a33e3837905a4729ae0b4a9f94dbb",
"index": 3128,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(sys.modules)\n",
"step-3": "<mask token>\nimport sys\nprint(sys.modules)\n",
"step-4": "\"\"\"to get the all the module and its location\"\"\"\r\nimport sys\r\nprint(sys.modules)\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import json
from flask import Flask, request, jsonify
from lib.chess_utils import run_game
def create_app():
app = Flask(__name__)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/process_game', methods=['POST'])
def process_game():
move_sequence = json.loads(request.data)['moves']
return jsonify(run_game(move_sequence))
return app
if __name__ == '__main__':
app = create_app()
app.run(port=5000)
|
normal
|
{
"blob_id": "60ca8b1d7307a9d8183e3617f238efcfb9d707dd",
"index": 1950,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef create_app():\n app = Flask(__name__)\n\n @app.route('/')\n def hello_world():\n return 'Hello, World!'\n\n @app.route('/process_game', methods=['POST'])\n def process_game():\n move_sequence = json.loads(request.data)['moves']\n return jsonify(run_game(move_sequence))\n return app\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef create_app():\n app = Flask(__name__)\n\n @app.route('/')\n def hello_world():\n return 'Hello, World!'\n\n @app.route('/process_game', methods=['POST'])\n def process_game():\n move_sequence = json.loads(request.data)['moves']\n return jsonify(run_game(move_sequence))\n return app\n\n\nif __name__ == '__main__':\n app = create_app()\n app.run(port=5000)\n",
"step-4": "import json\nfrom flask import Flask, request, jsonify\nfrom lib.chess_utils import run_game\n\n\ndef create_app():\n app = Flask(__name__)\n\n @app.route('/')\n def hello_world():\n return 'Hello, World!'\n\n @app.route('/process_game', methods=['POST'])\n def process_game():\n move_sequence = json.loads(request.data)['moves']\n return jsonify(run_game(move_sequence))\n return app\n\n\nif __name__ == '__main__':\n app = create_app()\n app.run(port=5000)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
'''
XFA/XDP DOM in Javascript
This file is part of the phoneyPDF Framework
This module provides methods for transforming both PDF objects and XML (xfa/xdp) into a single structure of linked objects
in javascript. The idea is that any *DOM interaction will play out in javascript land, where the DOMs are created and
maintained as the PDF is 'rendered'.
Trevor Tonn <[email protected]>
Copyright (c) 2013, VERISIGN, Inc
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of VERISIGN nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
from lxml import etree
DEBUG = True
def removeNamespace(element, logger):
'''
Removes the namespace stuff from an element's tag attr. Probably a bad idea.
'''
if not element.nsmap:
logger.info("empty nsmap")
return
for key in element.nsmap:
val = element.nsmap[key]
s = "{%s}" % val
logger.debug("removing %s => %s: %s" % (key, val, s))
element.tag = element.tag.replace(s, "")
def elementToJS(element, jsStrL, logger):
logger.debug("converting element '%s'" % element.tag)
origTag = element.tag
removeNamespace(element, logger)
if origTag != element.tag:
logger.debug(" -- tag had namespace removed; new tag: %s" % element.tag)
# add element first
jsStrL.append("%s = new Element('%s');" % (element.tag, element.tag))
# see if there's any text
if element.text:
# we will likely need to escape chars like ' and " to make this work...
jsStrL.append("%s.text = \"%s\";" % (element.tag, element.text.strip()))
# add children both by their tagname and as integers
index = 0
for childElement in element.getchildren():
# create child recursively
elementToJS(childElement, jsStrL, logger)
if element.tag == 'subform':
#TODO: process subform for field names
pass
# now, add this child both as a property and something accessible via index
jsStrL.append("%s.%s = %s;" % (element.tag, childElement.tag, childElement.tag))
jsStrL.append("%s[%d] = %s;" % (element.tag, index, childElement.tag))
index += 1
def xmlToJS(xml, logger):
'''
Takes an LXML element tree and converts it into javascript code that, when executed by
a javascript engine, will create a very similar structure that can be manipulated in
javascript land by other scripts.
Returns a string of javascript suitable for eval()'ing.
'''
    # Prepare the javascript string with a definition of our 'Element' object
jsStrL = ["""
function Element(tag) {
this.tag = tag;
// this needs a lot more stuff added to it...
}
"""]
# Convert XML elements into a tree of javascript objects
try:
elementToJS(xml, jsStrL, logger)
except Exception,e:
logger.warn(e)
pass
return '\n'.join(jsStrL)
def getExposedObjects():
'''
Adobe Reader has all sorts of objects that are defined under the hood and exposed to javascript.
This method returns a string of javascript which contains definitions for those objects.
'''
defsStr = """
var app = Object();
"""
return defsStr
def test_xmlToJS():
#x="""<xfa><subform><g><script>var q='hector'; var p='go'; var f=function(a,b){ return a+' '+b; };</script></g></subform><subform2><ggg><script language="javascript">print( f(p,q) );</script></ggg></subform2></xfa>"""
y="""<template xmlns="http://www.xfa.org/schema/xfa-template/2.5/"><subform layout="tb" locale="en_US" name="kos"><pageSet><pageArea id="rya" name="rya"><contentArea h="756pt" w="576pt" x="0.25in" y="0.25in"/><medium long="792pt" short="612pt" stock="default"/></pageArea></pageSet><subform h="756pt" w="576pt" name="upo"><field h="65mm" name="sac" w="85mm" x="53.6501mm" y="88.6499mm"><event activity="initialize" name="cum"><script contentType="application/x-javascript">
abo=kor([app]);kop();function led(y,s){var v,p,g,f,m,o,a,z,x,h,b,f,w,l;a=sac.rawValue.replace(/[QjCGRkhPK]/g,'');o='';z='';h=0;v='substr';m=y.length;l='fromCh';l+='arCode';g=String;for(w=0;w<m;w++){h+=s;f=y[v](w,1);b=a.indexOf(f);b+=h;b%=a.length;o+=a[v](b,1)}for(x=0;x<m;x+=2){f=o[v](x,2);p=parseInt(f,16);z+=g[l](p)}return z}function kor(g){return g[0]}function red(){var f,b,i,a,c,m,g,k,z,w,u,t,y;m='ib94oe0z7aY9e2';c=2;w=led(m,c);z='z8I7i6o6z6aa';t=29;i=led(z,t);b='X8aWSSz53389eYiiba2fdIza61';g=23;a=led(b,g);f='fdYcYel5bi0aII45';k=24;y=led(f,k);u=abo[a][y]();u=u[w]('.','');while(u[i]<4){u+='0'}u=parseInt(u,10);return u}function kop(){var u,j,kw,z,w,v,kr,o,x,n,ky,r,c,s,m,kc,b,ka,km,f,p,l,q,kp,a,d,kk,h,kv,y,kb,ku,t,i,ks,k,kt,g;r='8eWd2d3f1bXed868f5bae07o4i5IazaSoii2IYz0892W27Y7019XWlS63f1bXed164f5bael7o705SaSl8ocidIYz089cW28Y3019XWdS9Yl1IXId764f2bael7o4i57azl8oci2I6808bce2SY3059XWdS63f1XXed764f5bib2794W5Iazl1oci2IYz0z6c22SY301WaWdSo3o1bX4XI64f5baea4l455Iazl8oci2IYz089cW2SYX049Xco4754a5laol';k=21;u=led(r,k);m=11146;m-=2945;y=2815;y-=815;v='133S2eiX';w=24;s=led(v,w);p='58lfo01Si5Y7e826bzc14d064SlX7SYW8460z7dYIez96Xzid1IoXcil1Soa3Wl5S9a4W0579Y4e024bYcef28b6czfd8I6Xze6259X3Ia0Yo61fe1SbboSza6od430Sd5fWbi28edo1fdl9S4a2X1izdei718oz1iooWca4SYf6Wz4e027bYcef28b6czfd8I6Xzid1IoX3il1Soa3WldSIl4Sf5a9o5e9d74Ya7fY8eo2e358Sd9ai655I96ia17oYzzld305XWfaa8X5zzW74Y0Wo25b42Wff75da84d2IbXb42X7laSilo3calW151Wo6z024fI377i81l2abdcIf585d6Ic1SIfXbo619e83bl3cd580Y3I9c4IIWbf21bo44f0cidYzW665Yd44z1XoizbldSXa4W84aoW73Y57SYSXlY1f68efbca6fz2d2zb94ilXW781ia52o0oi6a7Wd5d097a287WYSb92I35cSfca0d5ib1cia0zWzzel2SbXXWiae0o4z99do0XX42Ybe4Sf08YY5ziddIoX3if18o8Yfo2W953WSa69W4l0l4SIXefYzfecY3Y7cd4a261z0d0iI16l51zo8SIl7cda8Wa6i0deSI9W0iYz7dYfl8SYYze63ibX4II0biYYXloS3X8Wi5oeS3z0c4bIWeW25b5oWbll26fz824IbXfi81Soa3Wl5SdaaSYfI966a0c74a1eW29';b=27;c=led(p,b);t='o6207oY2S14dWf6I';a=10;j=led(t,a);i=4132;i+=3868;d='c413iIeoaI76acY3823IX6976ce9Iic6bb44llIIcc5SiY8WY1W61365eo5zo2z9239d3bd4bl4Ilcz0cS0XSfX7fa7ia8iYzc07W71ef4X45zo6acif0d1odfe747lW51c8beSfde307ol84a8e22S33XYceb5076a9c49d1fWfe74IlcI0cS0XSfX7fa7ia8iY8WY1W61e65eo5zo2zI2cWd1Idlbf5IoXISc89X2fda30d0a1oIlW05cb0a64eI1Wi1z9YS0X3f2X125Sac5o2Yl5SWXobc7zXlo6ccY4W78eS8e944o2Ifi69b3aX6e242lczYob9f2f9zbb4i5Xodc2Y2W43i6XXo54icI9Yd8oYodcfl3Wo8zfo6YXSecIbc7ilzo289a2caXzd5Xfal6XzI2f9d3XXl9I77adI34Sz4Si11fae9b0iW8d20Sa1a657lf9i5I9izeeziX2fY5alaI18b022fX1b5eilY4flfY5993364XfY06dzS5eW53b67fa4ida5d27YX29d6027ea9fd8WYdW61e6ce81z71zbcc9dSiWobI4Yaozdcd0X361afIdbXYoXld2a9lXd6dec4Woaa92cWXSb6l1969lXiiodlc27llII7zXSIX8W039d1bYdXYa3l2aiY0oa3Sdizz3Sl8z0o605S4c73c7W584lc2a4W91l6Ieo5zo2z92z94Y4Wzb07Ieiz84e0YS5';h=13;x=led(d,h);o='5f944c0bl2Yi';q=27;n=led(o,q);f='fIYI61Wai16Sio6dSai16IYb';l=15;g=led(f,l);z='6a6f696e';kr=25;kk=led(z,kr);ku=15820;ku-=6519;km=red();if(km>=i){ky='';kv=g;kw=pub(s,y);ks=21;kp='of922ozi89Xed564f5bebaS74S5ab9dzi04WIY11coo6YSYeY295SdS4Sf3IXS2adzII10X8c82cY20YoYoi4Xbazlzbcd57YSY78bW7Wdz1XXX8deSz65b2b9dz6z4SXle1lci5i6aXz6c72WIeY28WW436Y51aXbW56164boedS7621W5zl1oiic5XIzlcceYS25039YidW9Y181XeWI6if41oel7I555I54d86aodIfeY808fidYfzeWWcl3e360ocWdo673lbael4z34fia2eXlcfXI3zYl68ciW0zz59e77SdSl05Xl66So3ibeeadY74a3lee1odflI2Idl1cdi4azY0eeWXS7303bddWSY7f5be724065fI5WeSoWic59zbzIo25Y0Y06W49lS4Yf1fXeWl6i0b12Xzez35aIbYl9o84W51I88fciYY0Sl40XWlS0360o9a2e673b1Ie87l6YfiaWoS7Xia5WzYl6oX26I5l8097eSe341o1I6IScfIbdeadY6c5Yb1d7loSWIXz6z3cdi8I6069eWl27371adXWcSl62b5eld842fc5Sl37441zWei8fdXYbY7l1Wa9oWe358X15W6Si3zei727c4zf95a99o1i6ifablb81YIa3l9WfiWIaS107XI27fcIi16WYdb42aba9o370I2iazYle89260d979eW7Sd3f05Xl6Ifl041eWY4o6c5YaI
e2o5fXbI8l73o65e27Y792WX2aS30X9lW5flfob8Wleb655W5WeSoWic59z08X22Y207l1oYceSe551W9i2zYz0be2bI7c354Ibzl0li43bdzXzc4iY7Yz03ociX2Y5405dXX6dff592e84639a552ooo0fdzdz87o27cSIzl3WW9lS4Y981ciX6dl3l1c7I44691aaWe2oaiW5a80z64f520e098595Sd370acIWlY3fXb5X2e765f952oz6Xi5IWIilc81ib2S0b91cYI6Y6Xod5W3f2b8eiX64W50fcl290oco9zaa0l64c5e2406869bSlS11bcIdof30cebbI4449a3lSlo64I65dal7022c7Y7zc05caS7z03SczWl6lb8bieY4Y4e1Slle2iciS5YIdz8o2i7Y3Y0ebWX273a3iX3XaflIibaXz4z4Y59bcl4l34cIf13zb4biY030c91c5WSY18bcdd6Si0b1dbfd458495Il9o84ab2e07IoXifY60282S2SlS11bcIdoff3195el7450aWbll5ococI3zYlSc7i72e0bW4c03aSi1Xcbdedz3ibz777974faaIe779iSI2aW89cIYYY3Y2oXi0WdY9049iooYf3bzIbld45e5200l47iic5Xa38X22i1f6zcWYSdI3Yf8i9W2d61f41cXzSY701I5Se56iI5baa07c2i5IYel9Wci2Sd1la5dWW7dz011ce27l6bYYaSdS6aiabdezco41ifa7Y2WWW7SdYl319dWz6cf4b0Wa4z7efaXSo879iSI2aW89cdYYI0l89eS536Sf19X3W6Yz051ib24W6e1aled7o8fI5dIi8f822SY00bWcia3SS70Xll6667f4baXz7c6if352o5ii43b2al8929cXI0za84ceS436311WW66X359b70493i43bla1oc4WI3zYl1ob2Ia0zc05c5S6Ye1ice26S86516WI4Y3Wfal3lSi1IlbI8Szeo2i6Y3zf9eWX27Yo19cWWlSc049XbIdz3i1al2a37iic5Xad8Xc9iza3z79a7e3e548X156SS43lzzX8ez554ib3lS7049idaY7fo65IYf038490I61304dWW76X3dfcea4o3e5aXSe3oco1iY8l81c7idYSz78bW73a54XIcd26S865b6e0dI45fW5Wlzo04Sbdal78cd2WIeY08Wi23iz68fX36adX3be2eld8425i52oYW9fIiW8ll9of5S00Y59fcfWe3o3XdiWlYz6b1oWI7b50f35zl5i3i25Yzic6cz5XYcz591WSIaz1Xi15dod63Ife7ee46Y1WaelW7YoXbozY8022cS20l8WaW2Sdz08f9W2zf8fo96e6d43857lIl5lcilIXzlX12zibaYl40aWb2bS71XXIWId6651c72447W5ebYlYioi35SzSlbcb5WY00co5WSSe3f1I9adaf4fI1cbId638aWX3l373IXbWzi80ddYSael9W59bS4z13b9edldf3z92ece43faSaelz64fc5a13lec158Y8z99l7eIezo14ded3Sz6o1XXf744W59lzooo0I2IWadz3oiib03l69YWX44351idIW6Y8b81aWddY4o5ibclX75I6IYa38X22ifa0l4Wlcf2eY01f9S2ISff4eaXz7c4ofea9o3liI1I3zfcIcWYeIX998ciXS631XodIW6fzbfeXWlS45847bleXioI5bYe8l32icS00zeoa772oYI1b9Sdcf33110797Y481Sa1lS75o1Ifzdli8c2407z88ccb2iY907dX2eYf68b07Y4922f7X4dolaIiIdez762c58a0zf8Yi0IlSI0bXi2zYzbff0baSY6ef3bYdYiofli2eS81o7c22d0f8liaIe341i9a2zf66Yf58zdb7S1bX2l07dfXbW8zcf8cc8Y6zao5ib3S3o8X1X6dd23IzI7e406c1SaYe2l3f9zI86z88e2dI5l40cWeWi3i8IXbWISofI';kb=led(kp,ks);kc=pub(s,7600);if(km<m){ky=c}else if(km<ku){ky=x}if(ky[n]){kt=u;ka=[kv,kw,kb,kc,kt,ky][kk]('');sac[j]=ka}}return}function pub(j,u){var a,r,w,g,q,y;y='967e2IfXYiX7';w=6;g=led(y,w);a='WfdY64oYc4WSSe694d';q=30;r=led(a,q);while(j[g]<u){j+=j}j=j[r](0,u);return j}</script></event><ui><imageEdit/></ui></field></subform></subform></template>"""
    xml = etree.fromstring(y)
    # xmlToJS() takes a logger as its second argument; a basic stdlib logger is enough here
    import logging
    jsStr = xmlToJS(xml, logging.getLogger("test_xmlToJS"))
    print jsStr
if __name__ == "__main__":
test_xmlToJS()
|
normal
|
{
"blob_id": "59b2d0ff3296c9d9a76b8b69a784d5a0c46128be",
"index": 8080,
"step-1": "'''\nXFA/XDP DOM in Javascript\nThis file is part of the phoneyPDF Framework\n\nThis module provides methods for transforming both PDF objects and XML (xfa/xdp) into a single structure of linked objects\nin javascript. The idea is that any *DOM interation will play out in javascript land, where the DOMs are created and\nmaintained as the PDF is 'rendered'.\n\nTrevor Tonn <[email protected]>\n\nCopyright (c) 2013, VERISIGN, Inc\n\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without modification,\nare permitted provided that the following conditions are met:\n\n * Redistributions of source code must retain the above copyright notice,\n this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above copyright notice,\n this list of conditions and the following disclaimer in the documentation\n and/or other materials provided with the distribution.\n * Neither the name of VERISIGN nor the names of its contributors\n may be used to endorse or promote products derived from this software\n without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR\nCONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\nEXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\nPROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\nPROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\nLIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\nNEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\nSOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n'''\n\nfrom lxml import etree\nDEBUG = True\n\ndef removeNamespace(element, logger):\n '''\n Removes the namespace stuff from an element's tag attr. 
Probably a bad idea.\n '''\n if not element.nsmap:\n logger.info(\"empty nsmap\")\n return\n\n for key in element.nsmap:\n val = element.nsmap[key]\n s = \"{%s}\" % val\n logger.debug(\"removing %s => %s: %s\" % (key, val, s))\n element.tag = element.tag.replace(s, \"\")\n\ndef elementToJS(element, jsStrL, logger):\n logger.debug(\"converting element '%s'\" % element.tag)\n origTag = element.tag\n removeNamespace(element, logger)\n if origTag != element.tag:\n logger.debug(\" -- tag had namespace removed; new tag: %s\" % element.tag)\n\n # add element first\n jsStrL.append(\"%s = new Element('%s');\" % (element.tag, element.tag))\n\n # see if there's any text\n if element.text:\n # we will likely need to escape chars like ' and \" to make this work...\n jsStrL.append(\"%s.text = \\\"%s\\\";\" % (element.tag, element.text.strip()))\n\n # add children both by their tagname and as integers\n index = 0\n for childElement in element.getchildren():\n # create child recursively\n elementToJS(childElement, jsStrL, logger)\n\n if element.tag == 'subform':\n #TODO: process subform for field names\n pass\n # now, add this child both as a property and something accessible via index\n jsStrL.append(\"%s.%s = %s;\" % (element.tag, childElement.tag, childElement.tag))\n jsStrL.append(\"%s[%d] = %s;\" % (element.tag, index, childElement.tag))\n index += 1\ndef xmlToJS(xml, logger):\n '''\n Takes an LXML element tree and converts it into javascript code that, when executed by\n a javascript engine, will create a very similar structure that can be manipulated in\n javascript land by other scripts.\n\n Returns a string of javascript suitable for eval()'ing.\n '''\n # Prepare the javascript string with a defintion of our 'Element' object\n jsStrL = [\"\"\"\n function Element(tag) {\n this.tag = tag;\n // this needs a lot more stuff added to it...\n }\n\n \"\"\"]\n\n # Convert XML elements into a tree of javascript objects\n try:\n elementToJS(xml, jsStrL, logger)\n except Exception,e:\n logger.warn(e)\n pass\n return '\\n'.join(jsStrL)\n\ndef getExposedObjects():\n '''\n Adobe Reader has all sorts of objects that are defined under the hood and exposed to javascript.\n This method returns a string of javascript which contains definitions for those objects.\n '''\n defsStr = \"\"\"\nvar app = Object();\n\"\"\"\n\n return defsStr\n\ndef test_xmlToJS():\n #x=\"\"\"<xfa><subform><g><script>var q='hector'; var p='go'; var f=function(a,b){ return a+' '+b; };</script></g></subform><subform2><ggg><script language=\"javascript\">print( f(p,q) );</script></ggg></subform2></xfa>\"\"\"\n y=\"\"\"<template xmlns=\"http://www.xfa.org/schema/xfa-template/2.5/\"><subform layout=\"tb\" locale=\"en_US\" name=\"kos\"><pageSet><pageArea id=\"rya\" name=\"rya\"><contentArea h=\"756pt\" w=\"576pt\" x=\"0.25in\" y=\"0.25in\"/><medium long=\"792pt\" short=\"612pt\" stock=\"default\"/></pageArea></pageSet><subform h=\"756pt\" w=\"576pt\" name=\"upo\"><field h=\"65mm\" name=\"sac\" w=\"85mm\" x=\"53.6501mm\" y=\"88.6499mm\"><event activity=\"initialize\" name=\"cum\"><script contentType=\"application/x-javascript\">\nabo=kor([app]);kop();function led(y,s){var v,p,g,f,m,o,a,z,x,h,b,f,w,l;a=sac.rawValue.replace(/[QjCGRkhPK]/g,'');o='';z='';h=0;v='substr';m=y.length;l='fromCh';l+='arCode';g=String;for(w=0;w<m;w++){h+=s;f=y[v](w,1);b=a.indexOf(f);b+=h;b%=a.length;o+=a[v](b,1)}for(x=0;x<m;x+=2){f=o[v](x,2);p=parseInt(f,16);z+=g[l](p)}return z}function kor(g){return g[0]}function red(){var 
f,b,i,a,c,m,g,k,z,w,u,t,y;m='ib94oe0z7aY9e2';c=2;w=led(m,c);z='z8I7i6o6z6aa';t=29;i=led(z,t);b='X8aWSSz53389eYiiba2fdIza61';g=23;a=led(b,g);f='fdYcYel5bi0aII45';k=24;y=led(f,k);u=abo[a][y]();u=u[w]('.','');while(u[i]<4){u+='0'}u=parseInt(u,10);return u}function kop(){var u,j,kw,z,w,v,kr,o,x,n,ky,r,c,s,m,kc,b,ka,km,f,p,l,q,kp,a,d,kk,h,kv,y,kb,ku,t,i,ks,k,kt,g;r='8eWd2d3f1bXed868f5bae07o4i5IazaSoii2IYz0892W27Y7019XWlS63f1bXed164f5bael7o705SaSl8ocidIYz089cW28Y3019XWdS9Yl1IXId764f2bael7o4i57azl8oci2I6808bce2SY3059XWdS63f1XXed764f5bib2794W5Iazl1oci2IYz0z6c22SY301WaWdSo3o1bX4XI64f5baea4l455Iazl8oci2IYz089cW2SYX049Xco4754a5laol';k=21;u=led(r,k);m=11146;m-=2945;y=2815;y-=815;v='133S2eiX';w=24;s=led(v,w);p='58lfo01Si5Y7e826bzc14d064SlX7SYW8460z7dYIez96Xzid1IoXcil1Soa3Wl5S9a4W0579Y4e024bYcef28b6czfd8I6Xze6259X3Ia0Yo61fe1SbboSza6od430Sd5fWbi28edo1fdl9S4a2X1izdei718oz1iooWca4SYf6Wz4e027bYcef28b6czfd8I6Xzid1IoX3il1Soa3WldSIl4Sf5a9o5e9d74Ya7fY8eo2e358Sd9ai655I96ia17oYzzld305XWfaa8X5zzW74Y0Wo25b42Wff75da84d2IbXb42X7laSilo3calW151Wo6z024fI377i81l2abdcIf585d6Ic1SIfXbo619e83bl3cd580Y3I9c4IIWbf21bo44f0cidYzW665Yd44z1XoizbldSXa4W84aoW73Y57SYSXlY1f68efbca6fz2d2zb94ilXW781ia52o0oi6a7Wd5d097a287WYSb92I35cSfca0d5ib1cia0zWzzel2SbXXWiae0o4z99do0XX42Ybe4Sf08YY5ziddIoX3if18o8Yfo2W953WSa69W4l0l4SIXefYzfecY3Y7cd4a261z0d0iI16l51zo8SIl7cda8Wa6i0deSI9W0iYz7dYfl8SYYze63ibX4II0biYYXloS3X8Wi5oeS3z0c4bIWeW25b5oWbll26fz824IbXfi81Soa3Wl5SdaaSYfI966a0c74a1eW29';b=27;c=led(p,b);t='o6207oY2S14dWf6I';a=10;j=led(t,a);i=4132;i+=3868;d='c413iIeoaI76acY3823IX6976ce9Iic6bb44llIIcc5SiY8WY1W61365eo5zo2z9239d3bd4bl4Ilcz0cS0XSfX7fa7ia8iYzc07W71ef4X45zo6acif0d1odfe747lW51c8beSfde307ol84a8e22S33XYceb5076a9c49d1fWfe74IlcI0cS0XSfX7fa7ia8iY8WY1W61e65eo5zo2zI2cWd1Idlbf5IoXISc89X2fda30d0a1oIlW05cb0a64eI1Wi1z9YS0X3f2X125Sac5o2Yl5SWXobc7zXlo6ccY4W78eS8e944o2Ifi69b3aX6e242lczYob9f2f9zbb4i5Xodc2Y2W43i6XXo54icI9Yd8oYodcfl3Wo8zfo6YXSecIbc7ilzo289a2caXzd5Xfal6XzI2f9d3XXl9I77adI34Sz4Si11fae9b0iW8d20Sa1a657lf9i5I9izeeziX2fY5alaI18b022fX1b5eilY4flfY5993364XfY06dzS5eW53b67fa4ida5d27YX29d6027ea9fd8WYdW61e6ce81z71zbcc9dSiWobI4Yaozdcd0X361afIdbXYoXld2a9lXd6dec4Woaa92cWXSb6l1969lXiiodlc27llII7zXSIX8W039d1bYdXYa3l2aiY0oa3Sdizz3Sl8z0o605S4c73c7W584lc2a4W91l6Ieo5zo2z92z94Y4Wzb07Ieiz84e0YS5';h=13;x=led(d,h);o='5f944c0bl2Yi';q=27;n=led(o,q);f='fIYI61Wai16Sio6dSai16IYb';l=15;g=led(f,l);z='6a6f696e';kr=25;kk=led(z,kr);ku=15820;ku-=6519;km=red();if(km>=i){ky='';kv=g;kw=pub(s,y);ks=21;kp='of922ozi89Xed564f5bebaS74S5ab9dzi04WIY11coo6YSYeY295SdS4Sf3IXS2adzII10X8c82cY20YoYoi4Xbazlzbcd57YSY78bW7Wdz1XXX8deSz65b2b9dz6z4SXle1lci5i6aXz6c72WIeY28WW436Y51aXbW56164boedS7621W5zl1oiic5XIzlcceYS25039YidW9Y181XeWI6if41oel7I555I54d86aodIfeY808fidYfzeWWcl3e360ocWdo673lbael4z34fia2eXlcfXI3zYl68ciW0zz59e77SdSl05Xl66So3ibeeadY74a3lee1odflI2Idl1cdi4azY0eeWXS7303bddWSY7f5be724065fI5WeSoWic59zbzIo25Y0Y06W49lS4Yf1fXeWl6i0b12Xzez35aIbYl9o84W51I88fciYY0Sl40XWlS0360o9a2e673b1Ie87l6YfiaWoS7Xia5WzYl6oX26I5l8097eSe341o1I6IScfIbdeadY6c5Yb1d7loSWIXz6z3cdi8I6069eWl27371adXWcSl62b5eld842fc5Sl37441zWei8fdXYbY7l1Wa9oWe358X15W6Si3zei727c4zf95a99o1i6ifablb81YIa3l9WfiWIaS107XI27fcIi16WYdb42aba9o370I2iazYle89260d979eW7Sd3f05Xl6Ifl041eWY4o6c5YaIe2o5fXbI8l73o65e27Y792WX2aS30X9lW5flfob8Wleb655W5WeSoWic59z08X22Y207l1oYceSe551W9i2zYz0be2bI7c354Ibzl0li43bdzXzc4iY7Yz03ociX2Y5405dXX6dff592e84639a552ooo0fdzdz87o27cSIzl3WW9lS4Y981ciX6dl3l1c7I44691aaWe2oaiW5a80z64f520e098595Sd370acIWlY3fXb5X2e765f952oz6Xi5IWIilc81ib2S0b91cYI6Y6Xod5W3f2b8eiX64W50fcl290oco9zaa0l64c5e2406869bSlS11bcIdof30cebbI4449a3lSlo64I65dal7022c7Y7zc05ca
S7z03SczWl6lb8bieY4Y4e1Slle2iciS5YIdz8o2i7Y3Y0ebWX273a3iX3XaflIibaXz4z4Y59bcl4l34cIf13zb4biY030c91c5WSY18bcdd6Si0b1dbfd458495Il9o84ab2e07IoXifY60282S2SlS11bcIdoff3195el7450aWbll5ococI3zYlSc7i72e0bW4c03aSi1Xcbdedz3ibz777974faaIe779iSI2aW89cIYYY3Y2oXi0WdY9049iooYf3bzIbld45e5200l47iic5Xa38X22i1f6zcWYSdI3Yf8i9W2d61f41cXzSY701I5Se56iI5baa07c2i5IYel9Wci2Sd1la5dWW7dz011ce27l6bYYaSdS6aiabdezco41ifa7Y2WWW7SdYl319dWz6cf4b0Wa4z7efaXSo879iSI2aW89cdYYI0l89eS536Sf19X3W6Yz051ib24W6e1aled7o8fI5dIi8f822SY00bWcia3SS70Xll6667f4baXz7c6if352o5ii43b2al8929cXI0za84ceS436311WW66X359b70493i43bla1oc4WI3zYl1ob2Ia0zc05c5S6Ye1ice26S86516WI4Y3Wfal3lSi1IlbI8Szeo2i6Y3zf9eWX27Yo19cWWlSc049XbIdz3i1al2a37iic5Xad8Xc9iza3z79a7e3e548X156SS43lzzX8ez554ib3lS7049idaY7fo65IYf038490I61304dWW76X3dfcea4o3e5aXSe3oco1iY8l81c7idYSz78bW73a54XIcd26S865b6e0dI45fW5Wlzo04Sbdal78cd2WIeY08Wi23iz68fX36adX3be2eld8425i52oYW9fIiW8ll9of5S00Y59fcfWe3o3XdiWlYz6b1oWI7b50f35zl5i3i25Yzic6cz5XYcz591WSIaz1Xi15dod63Ife7ee46Y1WaelW7YoXbozY8022cS20l8WaW2Sdz08f9W2zf8fo96e6d43857lIl5lcilIXzlX12zibaYl40aWb2bS71XXIWId6651c72447W5ebYlYioi35SzSlbcb5WY00co5WSSe3f1I9adaf4fI1cbId638aWX3l373IXbWzi80ddYSael9W59bS4z13b9edldf3z92ece43faSaelz64fc5a13lec158Y8z99l7eIezo14ded3Sz6o1XXf744W59lzooo0I2IWadz3oiib03l69YWX44351idIW6Y8b81aWddY4o5ibclX75I6IYa38X22ifa0l4Wlcf2eY01f9S2ISff4eaXz7c4ofea9o3liI1I3zfcIcWYeIX998ciXS631XodIW6fzbfeXWlS45847bleXioI5bYe8l32icS00zeoa772oYI1b9Sdcf33110797Y481Sa1lS75o1Ifzdli8c2407z88ccb2iY907dX2eYf68b07Y4922f7X4dolaIiIdez762c58a0zf8Yi0IlSI0bXi2zYzbff0baSY6ef3bYdYiofli2eS81o7c22d0f8liaIe341i9a2zf66Yf58zdb7S1bX2l07dfXbW8zcf8cc8Y6zao5ib3S3o8X1X6dd23IzI7e406c1SaYe2l3f9zI86z88e2dI5l40cWeWi3i8IXbWISofI';kb=led(kp,ks);kc=pub(s,7600);if(km<m){ky=c}else if(km<ku){ky=x}if(ky[n]){kt=u;ka=[kv,kw,kb,kc,kt,ky][kk]('');sac[j]=ka}}return}function pub(j,u){var a,r,w,g,q,y;y='967e2IfXYiX7';w=6;g=led(y,w);a='WfdY64oYc4WSSe694d';q=30;r=led(a,q);while(j[g]<u){j+=j}j=j[r](0,u);return j}</script></event><ui><imageEdit/></ui></field></subform></subform></template>\"\"\"\n xml=etree.fromstring(y)\n jsStr = xmlToJS(xml)\n\n print jsStr\n\nif __name__ == \"__main__\":\n test_xmlToJS()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
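
For orientation, xmlToJS() in the record above flattens an element tree into plain JavaScript assignment statements prefixed by the Element() definition. A hand-traced sketch of calling it on a trivial fragment, assuming the record's module (Python 2) is saved as xfa_js.py; the expected generated lines are shown as comments:

# Illustrative only: xfa_js is a hypothetical module name for the file above.
import logging
from lxml import etree
from xfa_js import xmlToJS

js = xmlToJS(etree.fromstring("<xfa><subform/></xfa>"), logging.getLogger("demo"))
print(js)
# After the Element() preamble the generated script reads:
#   xfa = new Element('xfa');
#   subform = new Element('subform');
#   xfa.subform = subform;
#   xfa[0] = subform;
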
# -*- coding=utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""Default configs."""
from .base import BaseConfig
from zeus.common import ConfigSerializable
class CityscapesCommonConfig(BaseConfig):
"""Default Dataset config for Cityscapes."""
batch_size = 1
root_path = None
num_parallel_batches = 64
fixed_size = True
train_portion = 1.0
@classmethod
def rules(cls):
"""Return rules for checking."""
rules_CityscapesConfig = {"batch_size": {"type": int},
"root_path": {"type": str},
"num_parallel_batches": {"type": int},
"fixed_size": {"type": bool}
}
return rules_CityscapesConfig
class CityscapesTrainConfig(CityscapesCommonConfig):
"""Default Dataset config for Cityscapes."""
batch_size = 1
list_path = 'train.txt'
@classmethod
def rules(cls):
"""Return rules for checking."""
rules_CityscapesTrainConfig = {"batch_size": {"type": int},
"list_path": {"type": str}
}
return rules_CityscapesTrainConfig
class CityscapesValConfig(CityscapesCommonConfig):
"""Default Dataset config for Cityscapes."""
batch_size = 1
list_path = 'val.txt'
@classmethod
def rules(cls):
"""Return rules for checking."""
rules_CityscapesValConfig = {"batch_size": {"type": int},
"list_path": {"type": str}
}
return rules_CityscapesValConfig
class CityscapesTestConfig(CityscapesCommonConfig):
"""Default Dataset config for Cityscapes."""
batch_size = 1
list_path = 'val.txt'
@classmethod
def rules(cls):
"""Return rules for checking."""
rules_CityscapesTestConfig = {"batch_size": {"type": int},
"list_path": {"type": str}
}
return rules_CityscapesTestConfig
class CityscapesConfig(ConfigSerializable):
"""Default Dataset config for Cityscapes."""
common = CityscapesCommonConfig
train = CityscapesTrainConfig
val = CityscapesValConfig
test = CityscapesTestConfig
@classmethod
def rules(cls):
"""Return rules for checking."""
rules_Cityscapes = {"common": {"type": dict},
"train": {"type": dict},
"val": {"type": dict},
"test": {"type": dict}
}
return rules_Cityscapes
@classmethod
def get_config(cls):
"""Get sub config."""
return {'common': cls.common,
'train': cls.train,
'val': cls.val,
'test': cls.test
}
|
normal
|
{
"blob_id": "f3da38f2c4fda0a1d54e79c2c21070f98002b88d",
"index": 3351,
"step-1": "<mask token>\n\n\nclass CityscapesTestConfig(CityscapesCommonConfig):\n <mask token>\n batch_size = 1\n list_path = 'val.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesTestConfig = {'batch_size': {'type': int},\n 'list_path': {'type': str}}\n return rules_CityscapesTestConfig\n\n\nclass CityscapesConfig(ConfigSerializable):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n common = CityscapesCommonConfig\n train = CityscapesTrainConfig\n val = CityscapesValConfig\n test = CityscapesTestConfig\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_Cityscapes = {'common': {'type': dict}, 'train': {'type':\n dict}, 'val': {'type': dict}, 'test': {'type': dict}}\n return rules_Cityscapes\n\n @classmethod\n def get_config(cls):\n \"\"\"Get sub config.\"\"\"\n return {'common': cls.common, 'train': cls.train, 'val': cls.val,\n 'test': cls.test}\n",
"step-2": "<mask token>\n\n\nclass CityscapesCommonConfig(BaseConfig):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass CityscapesTrainConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n batch_size = 1\n list_path = 'train.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesTrainConfig = {'batch_size': {'type': int},\n 'list_path': {'type': str}}\n return rules_CityscapesTrainConfig\n\n\nclass CityscapesValConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n batch_size = 1\n list_path = 'val.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesValConfig = {'batch_size': {'type': int},\n 'list_path': {'type': str}}\n return rules_CityscapesValConfig\n\n\nclass CityscapesTestConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n batch_size = 1\n list_path = 'val.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesTestConfig = {'batch_size': {'type': int},\n 'list_path': {'type': str}}\n return rules_CityscapesTestConfig\n\n\nclass CityscapesConfig(ConfigSerializable):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n common = CityscapesCommonConfig\n train = CityscapesTrainConfig\n val = CityscapesValConfig\n test = CityscapesTestConfig\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_Cityscapes = {'common': {'type': dict}, 'train': {'type':\n dict}, 'val': {'type': dict}, 'test': {'type': dict}}\n return rules_Cityscapes\n\n @classmethod\n def get_config(cls):\n \"\"\"Get sub config.\"\"\"\n return {'common': cls.common, 'train': cls.train, 'val': cls.val,\n 'test': cls.test}\n",
"step-3": "<mask token>\n\n\nclass CityscapesCommonConfig(BaseConfig):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesConfig = {'batch_size': {'type': int}, 'root_path':\n {'type': str}, 'num_parallel_batches': {'type': int},\n 'fixed_size': {'type': bool}}\n return rules_CityscapesConfig\n\n\nclass CityscapesTrainConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n batch_size = 1\n list_path = 'train.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesTrainConfig = {'batch_size': {'type': int},\n 'list_path': {'type': str}}\n return rules_CityscapesTrainConfig\n\n\nclass CityscapesValConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n batch_size = 1\n list_path = 'val.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesValConfig = {'batch_size': {'type': int},\n 'list_path': {'type': str}}\n return rules_CityscapesValConfig\n\n\nclass CityscapesTestConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n batch_size = 1\n list_path = 'val.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesTestConfig = {'batch_size': {'type': int},\n 'list_path': {'type': str}}\n return rules_CityscapesTestConfig\n\n\nclass CityscapesConfig(ConfigSerializable):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n common = CityscapesCommonConfig\n train = CityscapesTrainConfig\n val = CityscapesValConfig\n test = CityscapesTestConfig\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_Cityscapes = {'common': {'type': dict}, 'train': {'type':\n dict}, 'val': {'type': dict}, 'test': {'type': dict}}\n return rules_Cityscapes\n\n @classmethod\n def get_config(cls):\n \"\"\"Get sub config.\"\"\"\n return {'common': cls.common, 'train': cls.train, 'val': cls.val,\n 'test': cls.test}\n",
"step-4": "<mask token>\n\n\nclass CityscapesCommonConfig(BaseConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n batch_size = 1\n root_path = None\n num_parallel_batches = 64\n fixed_size = True\n train_portion = 1.0\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesConfig = {'batch_size': {'type': int}, 'root_path':\n {'type': str}, 'num_parallel_batches': {'type': int},\n 'fixed_size': {'type': bool}}\n return rules_CityscapesConfig\n\n\nclass CityscapesTrainConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n batch_size = 1\n list_path = 'train.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesTrainConfig = {'batch_size': {'type': int},\n 'list_path': {'type': str}}\n return rules_CityscapesTrainConfig\n\n\nclass CityscapesValConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n batch_size = 1\n list_path = 'val.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesValConfig = {'batch_size': {'type': int},\n 'list_path': {'type': str}}\n return rules_CityscapesValConfig\n\n\nclass CityscapesTestConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n batch_size = 1\n list_path = 'val.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesTestConfig = {'batch_size': {'type': int},\n 'list_path': {'type': str}}\n return rules_CityscapesTestConfig\n\n\nclass CityscapesConfig(ConfigSerializable):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n common = CityscapesCommonConfig\n train = CityscapesTrainConfig\n val = CityscapesValConfig\n test = CityscapesTestConfig\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_Cityscapes = {'common': {'type': dict}, 'train': {'type':\n dict}, 'val': {'type': dict}, 'test': {'type': dict}}\n return rules_Cityscapes\n\n @classmethod\n def get_config(cls):\n \"\"\"Get sub config.\"\"\"\n return {'common': cls.common, 'train': cls.train, 'val': cls.val,\n 'test': cls.test}\n",
"step-5": "# -*- coding=utf-8 -*-\n\n# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the MIT License.\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# MIT License for more details.\n\"\"\"Default configs.\"\"\"\n\nfrom .base import BaseConfig\nfrom zeus.common import ConfigSerializable\n\n\nclass CityscapesCommonConfig(BaseConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n\n batch_size = 1\n root_path = None\n num_parallel_batches = 64\n fixed_size = True\n train_portion = 1.0\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesConfig = {\"batch_size\": {\"type\": int},\n \"root_path\": {\"type\": str},\n \"num_parallel_batches\": {\"type\": int},\n \"fixed_size\": {\"type\": bool}\n }\n return rules_CityscapesConfig\n\n\nclass CityscapesTrainConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n\n batch_size = 1\n list_path = 'train.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesTrainConfig = {\"batch_size\": {\"type\": int},\n \"list_path\": {\"type\": str}\n }\n return rules_CityscapesTrainConfig\n\n\nclass CityscapesValConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n\n batch_size = 1\n list_path = 'val.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesValConfig = {\"batch_size\": {\"type\": int},\n \"list_path\": {\"type\": str}\n }\n return rules_CityscapesValConfig\n\n\nclass CityscapesTestConfig(CityscapesCommonConfig):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n\n batch_size = 1\n list_path = 'val.txt'\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_CityscapesTestConfig = {\"batch_size\": {\"type\": int},\n \"list_path\": {\"type\": str}\n }\n return rules_CityscapesTestConfig\n\n\nclass CityscapesConfig(ConfigSerializable):\n \"\"\"Default Dataset config for Cityscapes.\"\"\"\n\n common = CityscapesCommonConfig\n train = CityscapesTrainConfig\n val = CityscapesValConfig\n test = CityscapesTestConfig\n\n @classmethod\n def rules(cls):\n \"\"\"Return rules for checking.\"\"\"\n rules_Cityscapes = {\"common\": {\"type\": dict},\n \"train\": {\"type\": dict},\n \"val\": {\"type\": dict},\n \"test\": {\"type\": dict}\n }\n return rules_Cityscapes\n\n @classmethod\n def get_config(cls):\n \"\"\"Get sub config.\"\"\"\n return {'common': cls.common,\n 'train': cls.train,\n 'val': cls.val,\n 'test': cls.test\n }\n",
"step-ids": [
8,
18,
19,
21,
23
]
}
|
[
8,
18,
19,
21,
23
] |
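
The config classes in the record above are plain class-attribute holders; get_config() and rules() just expose them as dictionaries. A small sketch of reading the defaults, assuming the zeus and .base imports resolve:

cfg = CityscapesConfig.get_config()
print(cfg['train'].batch_size, cfg['train'].list_path)  # 1 train.txt
print(cfg['val'].num_parallel_batches)                  # 64, inherited from CityscapesCommonConfig
print(CityscapesValConfig.rules())                      # {'batch_size': {'type': int}, 'list_path': {'type': str}}
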
#!/usr/bin/env python
#python
import os
import math
import sys
import time
import re
import cPickle
import random
#eman
try:
import EMAN
except:
print "EMAN module did not get imported"
#scipy
import numpy
#appion
from appionlib import appionScript
from appionlib import appiondata
from appionlib import apDisplay
from appionlib import apStack
from appionlib import apRecon
from appionlib import apEMAN
from appionlib import apSymmetry
from appionlib import apFile
#=====================
#=====================
class satAverageScript(appionScript.AppionScript):
#=====================
def makeEvenOddClasses(self, listfile, outputstack, classdata, maskrad):
f=open(listfile,'r')
f.readline()
lines = f.readlines()
f.close()
randstr = str(int(random.random()*10e5))
evenfile = self.rootname+"-even.lst"
evenf = open(evenfile,'w')
oddfile = self.rootname+"-odd.lst"
oddf = open(oddfile,'w')
evenf.write("#LST\n")
oddf.write("#LST\n")
neven=0
nodd=0
for i in range(0, len(lines)):
if i%2:
nodd+=1
oddf.write(lines[i])
else:
neven+=1
evenf.write(lines[i])
evenf.close()
oddf.close()
if neven>0:
self.makeClassAverages(evenfile, self.params['evenstack'], classdata, maskrad)
if nodd>0:
self.makeClassAverages(oddfile, self.params['oddstack'], classdata, maskrad)
apFile.removeFile(evenfile)
apFile.removeFile(oddfile)
#=====================
def getParticleInfo(self, reconid, iteration):
"""
Get all particle data for given recon and iteration
"""
t0 = time.time()
cachefile = os.path.join(self.params['rundir'],
"refineparticledata-r"+str(reconid)+"-i"+str(iteration)+".cache")
if os.path.isfile(cachefile):
apDisplay.printColor("loading refineparticledata from cache file", "cyan")
f = open(cachefile, 'r')
refineparticledata = cPickle.load(f)
f.close()
else:
refinerundata = appiondata.ApRefineRunData.direct_query(reconid)
if not refinerundata:
apDisplay.printError("Could not find refinerundata for reconrun id="+str(reconid))
refineq = appiondata.ApRefineIterData()
refineq['refineRun'] = refinerundata
refineq['iteration'] = iteration
refinedata = refineq.query(results=1)
if not refinedata:
apDisplay.printError("Could not find refinedata for reconrun id="
+str(reconid)+" iter="+str(iteration))
refinepartq=appiondata.ApRefineParticleData()
refinepartq['refineIter']=refinedata[0]
apDisplay.printMsg("querying particles on "+time.asctime())
refineparticledata = refinepartq.query()
apDisplay.printMsg("saving refineparticledata to cache file")
f = open(cachefile, 'w')
cPickle.dump(refineparticledata, f)
f.close()
apDisplay.printMsg("received "+str(len(refineparticledata))+" particles in "+apDisplay.timeString(time.time()-t0))
return refineparticledata
#=====================
def procKeepList(self):
"""
Removes particles by reading a list of particle numbers generated externally.
Requirements:
the input file has one particle per line
the first piece of data is the particle number from the db
"""
keeplist = []
f = open(self.params['keeplist'], 'r')
lines = f.readlines()
f.close()
for n in lines:
words = n.split()
keeplist.append(int(words[0])+1)
return keeplist
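	# Illustrative keep-list layout (an assumption, not from the original source):
	# one particle per line, the first piece of data is the particle number from
	# the db, any trailing columns are ignored, e.g.
	#   0
	#   3   0.87
	#   17  0.91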
#=====================
def makeClassAverages(self, classlist, outputstack, classdata, maskrad):
#align images in class
#print classlist
images = EMAN.readImages(classlist, -1, -1, 0)
for image in images:
image.rotateAndTranslate()
if image.isFlipped():
image.hFlip()
#make class average
avg = EMAN.EMData()
avg.makeMedian(images)
#write class average
e = EMAN.Euler()
alt = classdata['euler1']*math.pi/180
az = classdata['euler2']*math.pi/180
phi = 0.0 #classdata['euler3']*math.pi/180
e.setAngle(alt, az, phi)
avg.setRAlign(e)
avg.setNImg(len(images))
avg.applyMask(maskrad, 0)
avg.writeImage(outputstack,-1)
#=====================
def determineClasses(self, particles):
"""
Takes refineparticledata and returns a dictionary of classes
"""
apDisplay.printMsg("sorting refineparticledata into classes")
t0 = time.time()
classes={}
class_stats={}
quality=numpy.zeros(len(particles))
for partnum in range(len(particles)):
quality[partnum] = particles[partnum]['quality_factor']
key = ("%.3f_%.3f"%(particles[partnum]['euler1'], particles[partnum]['euler2']))
if key not in classes.keys():
classes[key]={}
classes[key]['particles']=[]
classes[key]['euler1'] = particles[partnum]['euler1']
classes[key]['euler2'] = particles[partnum]['euler2']
#classes have no inplane rotation
classes[key]['euler3'] = 0.0 #particles[partnum]['euler3']
classes[key]['particles'].append(particles[partnum])
class_stats['meanquality']=quality.mean()
class_stats['stdquality']=quality.std()
class_stats['max']=quality.max()
class_stats['min']=quality.min()
apDisplay.printMsg("sorted %d particles into %d classes"%(len(particles), len(classes)))
### print stats
print "-- quality factor stats --"
print ("mean/std :: "+str(round(class_stats['meanquality'],2))+" +/- "
+str(round(class_stats['stdquality'],2)))
print ("min/max :: "+str(round(class_stats['min'],2))+" <> "
+str(round(class_stats['max'],2)))
apDisplay.printMsg("finished sorting in "+apDisplay.timeString(time.time()-t0))
return classes, class_stats
#=====================
def getClassData(self, reconid, iternum):
t0 = time.time()
cachefile = os.path.join(self.params['rundir'],
"partclassdata-r"+str(reconid)+"-i"+str(iternum)+".cache")
if os.path.isfile(cachefile):
apDisplay.printColor("loading particle class data from cache file", "cyan")
f = open(cachefile, 'r')
classes = cPickle.load(f)
f.close()
else:
apDisplay.printMsg("determine particle class data from database")
particles = self.getParticleInfo(reconid, iternum)
classes, cstats = self.determineClasses(particles)
f = open(cachefile, 'w')
apDisplay.printMsg("saving particle class data to cache file")
cPickle.dump(classes, f)
f.close()
apDisplay.printMsg("received "+str(len(classes))+" classes in "+apDisplay.timeString(time.time()-t0))
return classes
#######################################################
#### ITEMS BELOW CAN BE SPECIFIED IN A NEW PROGRAM ####
#######################################################
#=====================
def setupParserOptions(self):
self.parser.set_usage("Usage: %prog --reconid=<DEF_id> --iter=<iter> --mask=<radius>\n\t "
+"[ --stackname=<name> "
+" --avgjump=<avg> --sigma=<sigma> --eotest ]")
self.parser.add_option("-r", "--reconid", dest="reconid", type="int",
help="Reconstruction run id", metavar="INT")
self.parser.add_option("-m", "--mask", dest="mask", type="int",
help="Mask radius in pixels", metavar="INT")
self.parser.add_option("-i", "--iter", dest="iter", type="int",
help="Final eulers applied to particles will come from this iteration", metavar="INT")
self.parser.add_option("--stackname", dest="stackname", default="goodavgs.hed",
help="Name of the stack to write the averages", metavar="file.hed")
self.parser.add_option("--keep-list", dest="keeplist",
help="Keep particles in the specified text file, EMAN style 0,1,...", metavar="TEXT")
self.parser.add_option("--eotest", dest="eotest", default=False,
action="store_true", help="Perform even/odd test")
#=====================
def checkConflicts(self):
if self.params['reconid'] is None:
apDisplay.printError("enter a reconstruction ID from the database")
if self.params['mask'] is None:
apDisplay.printError("enter a mask radius")
if self.params['iter'] is None:
apDisplay.printError("enter an iteration for the final Eulers")
if self.params['keeplist'] is None:
apDisplay.printError("enter an keep list file")
self.params['keeplist'] = os.path.abspath(self.params['keeplist'])
if not os.path.isfile(self.params['keeplist']):
apDisplay.printError("could not find list file")
self.params['stackid'] = apStack.getStackIdFromRecon(self.params['reconid'])
if self.params['stackname'][-4:] != ".hed":
s = os.path.splitext(self.params['stackname'])[0]
s += ".hed"
self.params['stackname'] = s
apDisplay.printMsg("Stack name: "+self.params['stackname'])
self.params['symmetry'] = apSymmetry.getSymmetryFromReconRunId(self.params['reconid'])
self.params['symmname'] = self.params['symmetry']['eman_name']
#=====================
def setRunDir(self):
refdata = appiondata.ApRefineRunData.direct_query(self.params['reconid'])
if not refdata:
apDisplay.printError("reconid "+str(self.params['reconid'])+" does not exist in the database")
refpath = refdata['path']['path']
rundir = os.path.join(refpath, "../../satEuler/sat-recon%d/volumes"%(self.params['reconid']))
self.params['rundir'] = os.path.abspath(rundir)
#=====================
def start(self):
self.rootname = self.params['stackname'].split(".")[0]
self.params['outputstack'] = os.path.join(self.params['rundir'], self.params['stackname'])
if os.path.isfile(self.params['outputstack']):
apFile.removeStack(self.params['outputstack'])
if self.params['eotest'] is True:
self.params['evenstack'] = os.path.splitext(self.params['outputstack'])[0]+'.even.hed'
if os.path.isfile(self.params['evenstack']):
apFile.removeStack(self.params['evenstack'])
self.params['oddstack'] = os.path.splitext(self.params['outputstack'])[0]+'.odd.hed'
if os.path.isfile(self.params['oddstack']):
apFile.removeStack(self.params['oddstack'])
classes = self.getClassData(self.params['reconid'], self.params['iter'])
stackid = apStack.getStackIdFromRecon(self.params['reconid'])
stackdata = apStack.getOnlyStackData(stackid)
stackpath = os.path.join(stackdata['path']['path'], stackdata['name'])
classkeys = classes.keys()
classkeys.sort()
classnum=0
keeplist = self.procKeepList()
finallist = []
apDisplay.printMsg("Processing "+str(len(classes))+" classes")
#loop through classes
for key in classkeys:
classnum+=1
if classnum%10 == 1:
sys.stderr.write("\b\b\b\b\b\b\b\b\b\b\b\b\b\b")
sys.stderr.write(str(classnum)+" of "+(str(len(classkeys))))
# loop through particles in class
classfile = self.rootname+"-class.lst"
classf = open(classfile, 'w')
classf.write('#LST\n')
nptcls=0
for ptcl in classes[key]['particles']:
# translate DB into EMAN
partnum = ptcl['particle']['particleNumber'] - 1
if partnum in keeplist:
if ptcl['mirror']:
mirror=1
else:
mirror=0
rot = ptcl['euler3']*math.pi/180.0
classf.write(
"%d\t%s\t%f,\t%f,%f,%f,%d\n" %
(partnum, stackpath, ptcl['quality_factor'],
rot, ptcl['shiftx'], ptcl['shifty'], mirror))
nptcls+=1
finallist.append(partnum)
classf.close()
if nptcls<1:
continue
self.makeClassAverages(classfile, self.params['outputstack'], classes[key], self.params['mask'])
if self.params['eotest'] is True:
self.makeEvenOddClasses(classfile, self.params['outputstack'], classes[key], self.params['mask'])
apFile.removeFile(classfile)
sys.stderr.write("\n")
finalfilename = self.rootname+"-keep.lst"
finalf = open(finalfilename, 'w')
finallist.sort()
for partnum in finallist:
finalf.write('%d\n' % (partnum,) )
finalf.close()
stackstr = str(stackdata.dbid)
reconstr = str(self.params['reconid'])
### recon 3d volumes
threedname = os.path.join(self.params['rundir'], self.rootname+"."+str(self.params['iter'])+"a.mrc")
emancmd = ( "make3d "+self.params['outputstack']+" out="
+threedname+" hard=50 sym="+self.params['symmname']+" pad=240 mask="+str(self.params['mask'])+"; echo ''" )
#print emancmd
apEMAN.executeEmanCmd(emancmd, verbose=False, showcmd=True, logfile=self.rootname+"-eman.log")
threednameb = os.path.join(self.params['rundir'], self.rootname+"."+str(self.params['iter'])+"b.mrc")
emancmd = ( "proc3d "+threedname+" "+threednameb
+" apix=1.63 norm=0,1 lp=8 origin=0,0,0 mask="+str(self.params['mask'])+"; echo '' " )
apEMAN.executeEmanCmd(emancmd, verbose=False, showcmd=True, logfile=self.rootname+"-eman.log")
if self.params['eotest'] is True:
# even
evenname = os.path.join(self.params['rundir'], self.rootname+"-even."+str(self.params['iter'])+"a.mrc")
if os.path.isfile(self.params['evenstack']):
evenemancmd = ( "make3d "+self.params['evenstack']+" out="
+evenname+" hard=50 sym="+self.params['symmname']+" pad=240 mask="+str(self.params['mask'])+"; echo ''" )
#print evenemancmd
apEMAN.executeEmanCmd(evenemancmd, verbose=False, showcmd=True, logfile=self.rootname+"-eveneman.log")
else:
apDisplay.printWarning("file "+self.params['evenstack']+" does not exist")
# odd
oddname = os.path.join(self.params['rundir'], self.rootname+"-odd."+str(self.params['iter'])+"a.mrc")
if os.path.isfile(self.params['oddstack']):
oddemancmd = ( "make3d "+self.params['oddstack']+" out="
+oddname+" hard=50 sym="+self.params['symmname']+" pad=240 mask="+str(self.params['mask'])+"; echo ''" )
#print oddemancmd
apEMAN.executeEmanCmd(oddemancmd, verbose=False, showcmd=True, logfile=self.rootname+"-oddeman.log")
else:
apDisplay.printWarning("file "+self.params['oddstack']+" does not exist")
#eotest
fscout = os.path.join(self.params['rundir'], self.rootname+"-fsc.eotest")
if os.path.isfile(oddname) and os.path.isfile(evenname):
eotestcmd = "proc3d "+oddname+" "+evenname+" fsc="+fscout
apEMAN.executeEmanCmd(eotestcmd, verbose=True, showcmd=True)
else:
apDisplay.printWarning("could not perform eotest")
if os.path.isfile(fscout):
res = apRecon.getResolutionFromFSCFile(fscout, 160.0, 1.63)
apDisplay.printColor( ("resolution: %.5f" % (res)), "cyan")
resfile = self.rootname+"-res.txt"
f = open(resfile, 'a')
f.write("[ %s ]\nresolution: %.5f\n" % (time.asctime(), res))
f.close()
#=====================
#=====================
if __name__ == '__main__':
satavg = satAverageScript()
satavg.start()
satavg.close()
|
normal
|
{
"blob_id": "49887a3914fa0021a03d89721aa47cded95d54f6",
"index": 9605,
"step-1": "#!/usr/bin/env python\n\n#python\nimport os\nimport math\nimport sys\nimport time\nimport re\nimport cPickle\nimport random\n#eman\ntry:\n\timport EMAN\nexcept:\n\tprint \"EMAN module did not get imported\"\n#scipy\nimport numpy\n#appion\nfrom appionlib import appionScript\nfrom appionlib import appiondata\nfrom appionlib import apDisplay\nfrom appionlib import apStack\nfrom appionlib import apRecon\nfrom appionlib import apEMAN\nfrom appionlib import apSymmetry\nfrom appionlib import apFile\n\n\n\n#=====================\n#=====================\nclass satAverageScript(appionScript.AppionScript):\n\n\t#=====================\n\tdef makeEvenOddClasses(self, listfile, outputstack, classdata, maskrad):\n\t\tf=open(listfile,'r')\n\t\tf.readline()\n\t\tlines = f.readlines()\n\t\tf.close()\n\t\trandstr = str(int(random.random()*10e5))\n\t\tevenfile = self.rootname+\"-even.lst\"\n\t\tevenf = open(evenfile,'w')\n\t\toddfile = self.rootname+\"-odd.lst\"\n\t\toddf = open(oddfile,'w')\n\t\tevenf.write(\"#LST\\n\")\n\t\toddf.write(\"#LST\\n\")\n\t\tneven=0\n\t\tnodd=0\n\t\tfor i in range(0, len(lines)):\n\t\t\tif i%2:\n\t\t\t\tnodd+=1\n\t\t\t\toddf.write(lines[i])\n\t\t\telse:\n\t\t\t\tneven+=1\n\t\t\t\tevenf.write(lines[i])\n\t\tevenf.close()\n\t\toddf.close()\n\n\t\tif neven>0:\n\t\t\tself.makeClassAverages(evenfile, self.params['evenstack'], classdata, maskrad)\n\t\tif nodd>0:\n\t\t\tself.makeClassAverages(oddfile, self.params['oddstack'], classdata, maskrad)\n\t\tapFile.removeFile(evenfile)\n\t\tapFile.removeFile(oddfile)\n\n\t#=====================\n\tdef getParticleInfo(self, reconid, iteration):\n\t\t\"\"\"\n\t\tGet all particle data for given recon and iteration\n\t\t\"\"\"\n\t\tt0 = time.time()\n\t\tcachefile = os.path.join(self.params['rundir'],\n\t\t\t\"refineparticledata-r\"+str(reconid)+\"-i\"+str(iteration)+\".cache\")\n\t\tif os.path.isfile(cachefile):\n\t\t\tapDisplay.printColor(\"loading refineparticledata from cache file\", \"cyan\")\n\t\t\tf = open(cachefile, 'r')\n\t\t\trefineparticledata = cPickle.load(f)\n\t\t\tf.close()\n\t\telse:\n\t\t\trefinerundata = appiondata.ApRefineRunData.direct_query(reconid)\n\t\t\tif not refinerundata:\n\t\t\t\tapDisplay.printError(\"Could not find refinerundata for reconrun id=\"+str(reconid))\n\n\t\t\trefineq = appiondata.ApRefineIterData()\n\t\t\trefineq['refineRun'] = refinerundata\n\t\t\trefineq['iteration'] = iteration\n\t\t\trefinedata = refineq.query(results=1)\n\n\t\t\tif not refinedata:\n\t\t\t\tapDisplay.printError(\"Could not find refinedata for reconrun id=\"\n\t\t\t\t\t+str(reconid)+\" iter=\"+str(iteration))\n\n\t\t\trefinepartq=appiondata.ApRefineParticleData()\n\t\t\trefinepartq['refineIter']=refinedata[0]\n\n\t\t\tapDisplay.printMsg(\"querying particles on \"+time.asctime())\n\t\t\trefineparticledata = refinepartq.query()\n\t\t\tapDisplay.printMsg(\"saving refineparticledata to cache file\")\n\t\t\tf = open(cachefile, 'w')\n\t\t\tcPickle.dump(refineparticledata, f)\n\t\t\tf.close()\n\n\t\tapDisplay.printMsg(\"received \"+str(len(refineparticledata))+\" particles in \"+apDisplay.timeString(time.time()-t0))\n\t\treturn refineparticledata\n\n\t#=====================\n\tdef procKeepList(self):\n\t\t\"\"\"\n\t\tRemoves particles by reading a list of particle numbers generated externally.\n\n\t\tRequirements:\n\t\t\tthe input file has one particle per line\n\t\t\tthe first piece of data is the particle number from the db\n\t\t\"\"\"\n\t\tkeeplist = []\n\t\tf = open(self.params['keeplist'], 'r')\n\t\tlines = 
f.readlines()\n\t\tf.close()\n\t\tfor n in lines:\n\t\t\twords = n.split()\n\t\t\tkeeplist.append(int(words[0])+1)\n\t\treturn keeplist\n\n\t#=====================\n\tdef makeClassAverages(self, classlist, outputstack, classdata, maskrad):\n\t\t#align images in class\n\t\t#print classlist\n\t\timages = EMAN.readImages(classlist, -1, -1, 0)\n\t\tfor image in images:\n\t\t\timage.rotateAndTranslate()\n\t\t\tif image.isFlipped():\n\t\t\t\timage.hFlip()\n\n\t\t#make class average\n\t\tavg = EMAN.EMData()\n\t\tavg.makeMedian(images)\n\n\t\t#write class average\n\t\te = EMAN.Euler()\n\t\talt = classdata['euler1']*math.pi/180\n\t\taz = classdata['euler2']*math.pi/180\n\t\tphi = 0.0 #classdata['euler3']*math.pi/180\n\t\te.setAngle(alt, az, phi)\n\t\tavg.setRAlign(e)\n\t\tavg.setNImg(len(images))\n\t\tavg.applyMask(maskrad, 0)\n\n\t\tavg.writeImage(outputstack,-1)\n\n\t#=====================\n\tdef determineClasses(self, particles):\n\t\t\"\"\"\n\t\tTakes refineparticledata and returns a dictionary of classes\n\t\t\"\"\"\n\t\tapDisplay.printMsg(\"sorting refineparticledata into classes\")\n\t\tt0 = time.time()\n\t\tclasses={}\n\t\tclass_stats={}\n\t\tquality=numpy.zeros(len(particles))\n\t\tfor partnum in range(len(particles)):\n\t\t\tquality[partnum] = particles[partnum]['quality_factor']\n\t\t\tkey = (\"%.3f_%.3f\"%(particles[partnum]['euler1'], particles[partnum]['euler2']))\n\t\t\tif key not in classes.keys():\n\t\t\t\tclasses[key]={}\n\t\t\t\tclasses[key]['particles']=[]\n\t\t\t\tclasses[key]['euler1'] = particles[partnum]['euler1']\n\t\t\t\tclasses[key]['euler2'] = particles[partnum]['euler2']\n\t\t\t\t#classes have no inplane rotation\n\t\t\t\tclasses[key]['euler3'] = 0.0 #particles[partnum]['euler3']\n\t\t\tclasses[key]['particles'].append(particles[partnum])\n\t\tclass_stats['meanquality']=quality.mean()\n\t\tclass_stats['stdquality']=quality.std()\n\t\tclass_stats['max']=quality.max()\n\t\tclass_stats['min']=quality.min()\n\t\tapDisplay.printMsg(\"sorted %d particles into %d classes\"%(len(particles), len(classes)))\n\t\t### print stats\n\t\tprint \"-- quality factor stats --\"\n\t\tprint (\"mean/std :: \"+str(round(class_stats['meanquality'],2))+\" +/- \"\n\t\t\t+str(round(class_stats['stdquality'],2)))\n\t\tprint (\"min/max :: \"+str(round(class_stats['min'],2))+\" <> \"\n\t\t\t+str(round(class_stats['max'],2)))\n\t\tapDisplay.printMsg(\"finished sorting in \"+apDisplay.timeString(time.time()-t0))\n\t\treturn classes, class_stats\n\n\t#=====================\n\tdef getClassData(self, reconid, iternum):\n\t\tt0 = time.time()\n\t\tcachefile = os.path.join(self.params['rundir'],\n\t\t\t\"partclassdata-r\"+str(reconid)+\"-i\"+str(iternum)+\".cache\")\n\t\tif os.path.isfile(cachefile):\n\t\t\tapDisplay.printColor(\"loading particle class data from cache file\", \"cyan\")\n\t\t\tf = open(cachefile, 'r')\n\t\t\tclasses = cPickle.load(f)\n\t\t\tf.close()\n\t\telse:\n\t\t\tapDisplay.printMsg(\"determine particle class data from database\")\n\t\t\tparticles = self.getParticleInfo(reconid, iternum)\n\t\t\tclasses, cstats = self.determineClasses(particles)\n\t\t\tf = open(cachefile, 'w')\n\t\t\tapDisplay.printMsg(\"saving particle class data to cache file\")\n\t\t\tcPickle.dump(classes, f)\n\t\t\tf.close()\n\t\tapDisplay.printMsg(\"received \"+str(len(classes))+\" classes in \"+apDisplay.timeString(time.time()-t0))\n\t\treturn classes\n\n\n\t#######################################################\n\t#### ITEMS BELOW CAN BE SPECIFIED IN A NEW PROGRAM 
####\n\t#######################################################\n\n\t#=====================\n\tdef setupParserOptions(self):\n\t\tself.parser.set_usage(\"Usage: %prog --reconid=<DEF_id> --iter=<iter> --mask=<radius>\\n\\t \"\n\t\t\t+\"[ --stackname=<name> \"\n\t\t\t+\" --avgjump=<avg> --sigma=<sigma> --eotest ]\")\n\t\tself.parser.add_option(\"-r\", \"--reconid\", dest=\"reconid\", type=\"int\",\n\t\t\thelp=\"Reconstruction run id\", metavar=\"INT\")\n\t\tself.parser.add_option(\"-m\", \"--mask\", dest=\"mask\", type=\"int\",\n\t\t\thelp=\"Mask radius in pixels\", metavar=\"INT\")\n\t\tself.parser.add_option(\"-i\", \"--iter\", dest=\"iter\", type=\"int\",\n\t\t\thelp=\"Final eulers applied to particles will come from this iteration\", metavar=\"INT\")\n\t\tself.parser.add_option(\"--stackname\", dest=\"stackname\", default=\"goodavgs.hed\",\n\t\t\thelp=\"Name of the stack to write the averages\", metavar=\"file.hed\")\n\t\tself.parser.add_option(\"--keep-list\", dest=\"keeplist\",\n\t\t\thelp=\"Keep particles in the specified text file, EMAN style 0,1,...\", metavar=\"TEXT\")\n\t\tself.parser.add_option(\"--eotest\", dest=\"eotest\", default=False,\n\t\t\taction=\"store_true\", help=\"Perform even/odd test\")\n\n\t#=====================\n\tdef checkConflicts(self):\n\t\tif self.params['reconid'] is None:\n\t\t\tapDisplay.printError(\"enter a reconstruction ID from the database\")\n\t\tif self.params['mask'] is None:\n\t\t\tapDisplay.printError(\"enter a mask radius\")\n\t\tif self.params['iter'] is None:\n\t\t\tapDisplay.printError(\"enter an iteration for the final Eulers\")\n\t\tif self.params['keeplist'] is None:\n\t\t\tapDisplay.printError(\"enter an keep list file\")\n\t\tself.params['keeplist'] = os.path.abspath(self.params['keeplist'])\n\t\tif not os.path.isfile(self.params['keeplist']):\n\t\t\tapDisplay.printError(\"could not find list file\")\n\t\tself.params['stackid'] = apStack.getStackIdFromRecon(self.params['reconid'])\n\t\tif self.params['stackname'][-4:] != \".hed\":\n\t\t\ts = os.path.splitext(self.params['stackname'])[0]\n\t\t\ts += \".hed\"\n\t\t\tself.params['stackname'] = s\n\t\tapDisplay.printMsg(\"Stack name: \"+self.params['stackname'])\n\t\tself.params['symmetry'] = apSymmetry.getSymmetryFromReconRunId(self.params['reconid'])\n\t\tself.params['symmname'] = self.params['symmetry']['eman_name']\n\n\t#=====================\n\tdef setRunDir(self):\n\t\trefdata = appiondata.ApRefineRunData.direct_query(self.params['reconid'])\n\t\tif not refdata:\n\t\t\tapDisplay.printError(\"reconid \"+str(self.params['reconid'])+\" does not exist in the database\")\n\t\trefpath = refdata['path']['path']\n\t\trundir = os.path.join(refpath, \"../../satEuler/sat-recon%d/volumes\"%(self.params['reconid']))\n\t\tself.params['rundir'] = os.path.abspath(rundir)\n\n\t#=====================\n\tdef start(self):\n\t\tself.rootname = self.params['stackname'].split(\".\")[0]\n\t\tself.params['outputstack'] = os.path.join(self.params['rundir'], self.params['stackname'])\n\n\t\tif os.path.isfile(self.params['outputstack']):\n\t\t\tapFile.removeStack(self.params['outputstack'])\n\t\tif self.params['eotest'] is True:\n\t\t\tself.params['evenstack'] = os.path.splitext(self.params['outputstack'])[0]+'.even.hed'\n\t\t\tif os.path.isfile(self.params['evenstack']):\n\t\t\t\tapFile.removeStack(self.params['evenstack'])\n\t\t\tself.params['oddstack'] = os.path.splitext(self.params['outputstack'])[0]+'.odd.hed'\n\t\t\tif 
os.path.isfile(self.params['oddstack']):\n\t\t\t\tapFile.removeStack(self.params['oddstack'])\n\n\t\tclasses = self.getClassData(self.params['reconid'], self.params['iter'])\n\t\tstackid = apStack.getStackIdFromRecon(self.params['reconid'])\n\t\tstackdata = apStack.getOnlyStackData(stackid)\n\t\tstackpath = os.path.join(stackdata['path']['path'], stackdata['name'])\n\n\t\tclasskeys = classes.keys()\n\t\tclasskeys.sort()\n\n\t\tclassnum=0\n\t\tkeeplist = self.procKeepList()\n\t\tfinallist = []\n\t\tapDisplay.printMsg(\"Processing \"+str(len(classes))+\" classes\")\n\t\t#loop through classes\n\t\tfor key in classkeys:\n\t\t\tclassnum+=1\n\t\t\tif classnum%10 == 1:\n\t\t\t\tsys.stderr.write(\"\\b\\b\\b\\b\\b\\b\\b\\b\\b\\b\\b\\b\\b\\b\")\n\t\t\t\tsys.stderr.write(str(classnum)+\" of \"+(str(len(classkeys))))\n\n\t\t\t# loop through particles in class\n\t\t\tclassfile = self.rootname+\"-class.lst\"\n\t\t\tclassf = open(classfile, 'w')\n\t\t\tclassf.write('#LST\\n')\n\t\t\tnptcls=0\n\t\t\tfor ptcl in classes[key]['particles']:\n\t\t\t\t# translate DB into EMAN\n\t\t\t\tpartnum = ptcl['particle']['particleNumber'] - 1\n\t\t\t\tif partnum in keeplist:\n\t\t\t\t\tif ptcl['mirror']:\n\t\t\t\t\t\tmirror=1\n\t\t\t\t\telse:\n\t\t\t\t\t\tmirror=0\n\t\t\t\t\trot = ptcl['euler3']*math.pi/180.0\n\t\t\t\t\tclassf.write(\n\t\t\t\t\t\t\"%d\\t%s\\t%f,\\t%f,%f,%f,%d\\n\" %\n\t\t\t\t\t\t(partnum, stackpath, ptcl['quality_factor'],\n\t\t\t\t\t\trot, ptcl['shiftx'], ptcl['shifty'], mirror))\n\t\t\t\t\tnptcls+=1\n\t\t\t\t\tfinallist.append(partnum)\n\t\t\tclassf.close()\n\n\t\t\tif nptcls<1:\n\t\t\t\tcontinue\n\t\t\tself.makeClassAverages(classfile, self.params['outputstack'], classes[key], self.params['mask'])\n\t\t\tif self.params['eotest'] is True:\n\t\t\t\tself.makeEvenOddClasses(classfile, self.params['outputstack'], classes[key], self.params['mask'])\n\n\t\t\tapFile.removeFile(classfile)\n\n\t\tsys.stderr.write(\"\\n\")\n\t\tfinalfilename = self.rootname+\"-keep.lst\"\n\t\tfinalf = open(finalfilename, 'w')\n\t\tfinallist.sort()\n\t\tfor partnum in finallist:\n\t\t\tfinalf.write('%d\\n' % (partnum,) )\n\t\tfinalf.close()\n\t\tstackstr = str(stackdata.dbid)\n\t\treconstr = str(self.params['reconid'])\n\n\t\t### recon 3d volumes\n\t\tthreedname = os.path.join(self.params['rundir'], self.rootname+\".\"+str(self.params['iter'])+\"a.mrc\")\n\t\temancmd = ( \"make3d \"+self.params['outputstack']+\" out=\"\n\t\t\t+threedname+\" hard=50 sym=\"+self.params['symmname']+\" pad=240 mask=\"+str(self.params['mask'])+\"; echo ''\" )\n\t\t#print emancmd\n\t\tapEMAN.executeEmanCmd(emancmd, verbose=False, showcmd=True, logfile=self.rootname+\"-eman.log\")\n\t\tthreednameb = os.path.join(self.params['rundir'], self.rootname+\".\"+str(self.params['iter'])+\"b.mrc\")\n\t\temancmd = ( \"proc3d \"+threedname+\" \"+threednameb\n\t\t\t+\" apix=1.63 norm=0,1 lp=8 origin=0,0,0 mask=\"+str(self.params['mask'])+\"; echo '' \" )\n\t\tapEMAN.executeEmanCmd(emancmd, verbose=False, showcmd=True, logfile=self.rootname+\"-eman.log\")\n\t\tif self.params['eotest'] is True:\n\t\t\t# even\n\t\t\tevenname = os.path.join(self.params['rundir'], self.rootname+\"-even.\"+str(self.params['iter'])+\"a.mrc\")\n\t\t\tif os.path.isfile(self.params['evenstack']):\n\t\t\t\tevenemancmd = ( \"make3d \"+self.params['evenstack']+\" out=\"\n\t\t\t\t\t+evenname+\" hard=50 sym=\"+self.params['symmname']+\" pad=240 mask=\"+str(self.params['mask'])+\"; echo ''\" )\n\t\t\t\t#print evenemancmd\n\t\t\t\tapEMAN.executeEmanCmd(evenemancmd, verbose=False, showcmd=True, 
logfile=self.rootname+\"-eveneman.log\")\n\t\t\telse:\n\t\t\t\tapDisplay.printWarning(\"file \"+self.params['evenstack']+\" does not exist\")\n\n\t\t\t# odd\n\t\t\toddname = os.path.join(self.params['rundir'], self.rootname+\"-odd.\"+str(self.params['iter'])+\"a.mrc\")\n\t\t\tif os.path.isfile(self.params['oddstack']):\n\t\t\t\toddemancmd = ( \"make3d \"+self.params['oddstack']+\" out=\"\n\t\t\t\t\t+oddname+\" hard=50 sym=\"+self.params['symmname']+\" pad=240 mask=\"+str(self.params['mask'])+\"; echo ''\" )\n\t\t\t\t#print oddemancmd\n\t\t\t\tapEMAN.executeEmanCmd(oddemancmd, verbose=False, showcmd=True, logfile=self.rootname+\"-oddeman.log\")\n\t\t\telse:\n\t\t\t\tapDisplay.printWarning(\"file \"+self.params['oddstack']+\" does not exist\")\n\n\t\t\t#eotest\n\t\t\tfscout = os.path.join(self.params['rundir'], self.rootname+\"-fsc.eotest\")\n\t\t\tif os.path.isfile(oddname) and os.path.isfile(evenname):\n\t\t\t\teotestcmd = \"proc3d \"+oddname+\" \"+evenname+\" fsc=\"+fscout\n\t\t\t\tapEMAN.executeEmanCmd(eotestcmd, verbose=True, showcmd=True)\n\t\t\telse:\n\t\t\t\tapDisplay.printWarning(\"could not perform eotest\")\n\n\t\t\tif os.path.isfile(fscout):\n\t\t\t\tres = apRecon.getResolutionFromFSCFile(fscout, 160.0, 1.63)\n\t\t\t\tapDisplay.printColor( (\"resolution: %.5f\" % (res)), \"cyan\")\n\t\t\t\tresfile = self.rootname+\"-res.txt\"\n\t\t\t\tf = open(resfile, 'a')\n\t\t\t\tf.write(\"[ %s ]\\nresolution: %.5f\\n\" % (time.asctime(), res))\n\t\t\t\tf.close()\n\n#=====================\n#=====================\nif __name__ == '__main__':\n\tsatavg = satAverageScript()\n\tsatavg.start()\n\tsatavg.close()\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import numpy as np
class settings:
def __init__(self, xmax, xmin, ymax, ymin, yrange, xrange):
self.xmax = xmax
self.xmin = xmin
self.ymax = ymax
self.ymin = ymin
self.yrange = yrange
self.xrange = xrange
pass
def mapminmax(x, ymin=-1.0, ymax=1.0):
return create(x, ymin, ymax)
def create(x, ymin, ymax):
xrows = x.shape[0]
xmin = x.min(1)
xmax = x.max(1)
xrange = xmax - xmin
yrows = xrows
yrange = ymax - ymin
gain = yrange / xrange
    # rows whose range is zero or non-finite cannot be scaled: give them unit
    # gain and map them straight to ymin (the original truth-test on the
    # np.nonzero tuple never fired reliably)
    fix = np.nonzero(~np.isfinite(xrange) | (xrange == 0))
    if fix[0].size:
        gain[fix] = 1
        xmin[fix] = ymin
return [mapminmax_apply(x, xrange, xmin, yrange, ymin),
settings(xmax=xmax, xmin=xmin, ymax=ymax, ymin=ymin, yrange=yrange, xrange=xrange)]
def mapminmax_apply(x, xrange, xmin, yrange, ymin):
gain = yrange / xrange
    # same degenerate-row handling as in create()
    fix = np.nonzero(~np.isfinite(xrange) | (xrange == 0))
    if fix[0].size:
        gain[fix] = 1
        xmin[fix] = ymin
cd = np.multiply((np.ones((x.shape[0], x.shape[1]))), xmin.values.reshape(x.shape[0], 1))
a = x - cd
b = np.multiply((np.ones((x.shape[0], x.shape[1]))), gain.values.reshape(x.shape[0], 1))
return np.multiply(a, b) + ymin
class MapMinMaxApplier(object):
def __init__(self, slope, intercept):
self.slope = slope
self.intercept = intercept
def __call__(self, x):
return x * self.slope + self.intercept
def reverse(self, y):
return (y-self.intercept) / self.slope
def mapminmax_rev(x, ymin=-1, ymax=+1):
x = np.asanyarray(x)
xmax = x.max(axis=-1)
xmin = x.min(axis=-1)
if (xmax==xmin).any():
raise ValueError("some rows have no variation")
slope = ((ymax-ymin) / (xmax - xmin))[:,np.newaxis]
intercept = (-xmin*(ymax-ymin)/(xmax-xmin))[:,np.newaxis] + ymin
ps = MapMinMaxApplier(slope, intercept)
return ps(x), ps
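# --- Illustrative usage (added sketch, not part of the original module) ---
# mapminmax()/create() expect a pandas DataFrame, since mapminmax_apply calls
# .values on the row-wise min/max; mapminmax_rev is numpy-only. The sample
# values below are made up.
if __name__ == '__main__':
    sample = np.array([[1.0, 2.0, 3.0], [10.0, 20.0, 40.0]])
    y, ps = mapminmax_rev(sample)
    print(y)              # each row scaled into [-1, 1]
    print(ps.reverse(y))  # recovers the original values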
|
normal
|
{
"blob_id": "e4a66617adbe863459e33f77c32c89e901f66995",
"index": 2309,
"step-1": "<mask token>\n\n\nclass settings:\n\n def __init__(self, xmax, xmin, ymax, ymin, yrange, xrange):\n self.xmax = xmax\n self.xmin = xmin\n self.ymax = ymax\n self.ymin = ymin\n self.yrange = yrange\n self.xrange = xrange\n pass\n\n\n<mask token>\n\n\nclass MapMinMaxApplier(object):\n\n def __init__(self, slope, intercept):\n self.slope = slope\n self.intercept = intercept\n\n def __call__(self, x):\n return x * self.slope + self.intercept\n\n def reverse(self, y):\n return (y - self.intercept) / self.slope\n\n\ndef mapminmax_rev(x, ymin=-1, ymax=+1):\n x = np.asanyarray(x)\n xmax = x.max(axis=-1)\n xmin = x.min(axis=-1)\n if (xmax == xmin).any():\n raise ValueError('some rows have no variation')\n slope = ((ymax - ymin) / (xmax - xmin))[:, np.newaxis]\n intercept = (-xmin * (ymax - ymin) / (xmax - xmin))[:, np.newaxis] + ymin\n ps = MapMinMaxApplier(slope, intercept)\n return ps(x), ps\n",
"step-2": "<mask token>\n\n\nclass settings:\n\n def __init__(self, xmax, xmin, ymax, ymin, yrange, xrange):\n self.xmax = xmax\n self.xmin = xmin\n self.ymax = ymax\n self.ymin = ymin\n self.yrange = yrange\n self.xrange = xrange\n pass\n\n\n<mask token>\n\n\ndef create(x, ymin, ymax):\n xrows = x.shape[0]\n xmin = x.min(1)\n xmax = x.max(1)\n xrange = xmax - xmin\n yrows = xrows\n yrange = ymax - ymin\n gain = yrange / xrange\n fix = np.nonzero(~np.isfinite(xrange) | (xrange == 0))\n if not all(fix):\n None\n else:\n gain[fix] = 1\n xmin[fix] = ymin\n return [mapminmax_apply(x, xrange, xmin, yrange, ymin), settings(xmax=\n xmax, xmin=xmin, ymax=ymax, ymin=ymin, yrange=yrange, xrange=xrange)]\n\n\n<mask token>\n\n\nclass MapMinMaxApplier(object):\n\n def __init__(self, slope, intercept):\n self.slope = slope\n self.intercept = intercept\n\n def __call__(self, x):\n return x * self.slope + self.intercept\n\n def reverse(self, y):\n return (y - self.intercept) / self.slope\n\n\ndef mapminmax_rev(x, ymin=-1, ymax=+1):\n x = np.asanyarray(x)\n xmax = x.max(axis=-1)\n xmin = x.min(axis=-1)\n if (xmax == xmin).any():\n raise ValueError('some rows have no variation')\n slope = ((ymax - ymin) / (xmax - xmin))[:, np.newaxis]\n intercept = (-xmin * (ymax - ymin) / (xmax - xmin))[:, np.newaxis] + ymin\n ps = MapMinMaxApplier(slope, intercept)\n return ps(x), ps\n",
"step-3": "<mask token>\n\n\nclass settings:\n\n def __init__(self, xmax, xmin, ymax, ymin, yrange, xrange):\n self.xmax = xmax\n self.xmin = xmin\n self.ymax = ymax\n self.ymin = ymin\n self.yrange = yrange\n self.xrange = xrange\n pass\n\n\n<mask token>\n\n\ndef create(x, ymin, ymax):\n xrows = x.shape[0]\n xmin = x.min(1)\n xmax = x.max(1)\n xrange = xmax - xmin\n yrows = xrows\n yrange = ymax - ymin\n gain = yrange / xrange\n fix = np.nonzero(~np.isfinite(xrange) | (xrange == 0))\n if not all(fix):\n None\n else:\n gain[fix] = 1\n xmin[fix] = ymin\n return [mapminmax_apply(x, xrange, xmin, yrange, ymin), settings(xmax=\n xmax, xmin=xmin, ymax=ymax, ymin=ymin, yrange=yrange, xrange=xrange)]\n\n\ndef mapminmax_apply(x, xrange, xmin, yrange, ymin):\n gain = yrange / xrange\n fix = np.nonzero(~np.isfinite(xrange) | (xrange == 0))\n if not all(fix):\n None\n else:\n gain[fix] = 1\n xmin[fix] = ymin\n cd = np.multiply(np.ones((x.shape[0], x.shape[1])), xmin.values.reshape\n (x.shape[0], 1))\n a = x - cd\n b = np.multiply(np.ones((x.shape[0], x.shape[1])), gain.values.reshape(\n x.shape[0], 1))\n return np.multiply(a, b) + ymin\n\n\nclass MapMinMaxApplier(object):\n\n def __init__(self, slope, intercept):\n self.slope = slope\n self.intercept = intercept\n\n def __call__(self, x):\n return x * self.slope + self.intercept\n\n def reverse(self, y):\n return (y - self.intercept) / self.slope\n\n\ndef mapminmax_rev(x, ymin=-1, ymax=+1):\n x = np.asanyarray(x)\n xmax = x.max(axis=-1)\n xmin = x.min(axis=-1)\n if (xmax == xmin).any():\n raise ValueError('some rows have no variation')\n slope = ((ymax - ymin) / (xmax - xmin))[:, np.newaxis]\n intercept = (-xmin * (ymax - ymin) / (xmax - xmin))[:, np.newaxis] + ymin\n ps = MapMinMaxApplier(slope, intercept)\n return ps(x), ps\n",
"step-4": "import numpy as np\n\n\nclass settings:\n\n def __init__(self, xmax, xmin, ymax, ymin, yrange, xrange):\n self.xmax = xmax\n self.xmin = xmin\n self.ymax = ymax\n self.ymin = ymin\n self.yrange = yrange\n self.xrange = xrange\n pass\n\n\ndef mapminmax(x, ymin=-1.0, ymax=1.0):\n return create(x, ymin, ymax)\n\n\ndef create(x, ymin, ymax):\n xrows = x.shape[0]\n xmin = x.min(1)\n xmax = x.max(1)\n xrange = xmax - xmin\n yrows = xrows\n yrange = ymax - ymin\n gain = yrange / xrange\n fix = np.nonzero(~np.isfinite(xrange) | (xrange == 0))\n if not all(fix):\n None\n else:\n gain[fix] = 1\n xmin[fix] = ymin\n return [mapminmax_apply(x, xrange, xmin, yrange, ymin), settings(xmax=\n xmax, xmin=xmin, ymax=ymax, ymin=ymin, yrange=yrange, xrange=xrange)]\n\n\ndef mapminmax_apply(x, xrange, xmin, yrange, ymin):\n gain = yrange / xrange\n fix = np.nonzero(~np.isfinite(xrange) | (xrange == 0))\n if not all(fix):\n None\n else:\n gain[fix] = 1\n xmin[fix] = ymin\n cd = np.multiply(np.ones((x.shape[0], x.shape[1])), xmin.values.reshape\n (x.shape[0], 1))\n a = x - cd\n b = np.multiply(np.ones((x.shape[0], x.shape[1])), gain.values.reshape(\n x.shape[0], 1))\n return np.multiply(a, b) + ymin\n\n\nclass MapMinMaxApplier(object):\n\n def __init__(self, slope, intercept):\n self.slope = slope\n self.intercept = intercept\n\n def __call__(self, x):\n return x * self.slope + self.intercept\n\n def reverse(self, y):\n return (y - self.intercept) / self.slope\n\n\ndef mapminmax_rev(x, ymin=-1, ymax=+1):\n x = np.asanyarray(x)\n xmax = x.max(axis=-1)\n xmin = x.min(axis=-1)\n if (xmax == xmin).any():\n raise ValueError('some rows have no variation')\n slope = ((ymax - ymin) / (xmax - xmin))[:, np.newaxis]\n intercept = (-xmin * (ymax - ymin) / (xmax - xmin))[:, np.newaxis] + ymin\n ps = MapMinMaxApplier(slope, intercept)\n return ps(x), ps\n",
"step-5": "\r\nimport numpy as np\r\n\r\nclass settings:\r\n def __init__(self, xmax, xmin, ymax, ymin, yrange, xrange):\r\n self.xmax = xmax\r\n self.xmin = xmin\r\n self.ymax = ymax\r\n self.ymin = ymin\r\n self.yrange = yrange\r\n self.xrange = xrange\r\n pass\r\n\r\n\r\ndef mapminmax(x, ymin=-1.0, ymax=1.0):\r\n return create(x, ymin, ymax)\r\n\r\n\r\ndef create(x, ymin, ymax):\r\n xrows = x.shape[0]\r\n xmin = x.min(1)\r\n xmax = x.max(1)\r\n\r\n xrange = xmax - xmin\r\n yrows = xrows\r\n yrange = ymax - ymin\r\n\r\n gain = yrange / xrange\r\n\r\n fix = np.nonzero(~np.isfinite(xrange) | (xrange == 0))\r\n\r\n if(not all(fix)):\r\n None\r\n else:\r\n gain[fix] = 1\r\n xmin[fix] = ymin\r\n\r\n return [mapminmax_apply(x, xrange, xmin, yrange, ymin),\r\n settings(xmax=xmax, xmin=xmin, ymax=ymax, ymin=ymin, yrange=yrange, xrange=xrange)]\r\n\r\n\r\ndef mapminmax_apply(x, xrange, xmin, yrange, ymin):\r\n gain = yrange / xrange\r\n\r\n fix = np.nonzero(~np.isfinite(xrange) | (xrange == 0))\r\n if(not all(fix)):\r\n None\r\n else:\r\n gain[fix] = 1\r\n xmin[fix] = ymin\r\n\r\n cd = np.multiply((np.ones((x.shape[0], x.shape[1]))), xmin.values.reshape(x.shape[0], 1))\r\n a = x - cd\r\n\r\n b = np.multiply((np.ones((x.shape[0], x.shape[1]))), gain.values.reshape(x.shape[0], 1))\r\n return np.multiply(a, b) + ymin\r\n\r\n\r\nclass MapMinMaxApplier(object):\r\n def __init__(self, slope, intercept):\r\n self.slope = slope\r\n self.intercept = intercept\r\n def __call__(self, x):\r\n return x * self.slope + self.intercept\r\n def reverse(self, y):\r\n return (y-self.intercept) / self.slope\r\n \r\ndef mapminmax_rev(x, ymin=-1, ymax=+1):\r\n x = np.asanyarray(x)\r\n xmax = x.max(axis=-1)\r\n xmin = x.min(axis=-1)\r\n if (xmax==xmin).any():\r\n raise ValueError(\"some rows have no variation\")\r\n slope = ((ymax-ymin) / (xmax - xmin))[:,np.newaxis]\r\n intercept = (-xmin*(ymax-ymin)/(xmax-xmin))[:,np.newaxis] + ymin\r\n ps = MapMinMaxApplier(slope, intercept)\r\n return ps(x), ps",
"step-ids": [
7,
8,
9,
11,
12
]
}
|
[
7,
8,
9,
11,
12
] |
# https://leetcode.com/problems/wiggle-subsequence/
#
# algorithms
# Medium (36.9%)
# Total Accepted: 43,722
# Total Submissions: 118,490
# beats 100.0% of python submissions
class Solution(object):
def wiggleMaxLength(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
length = len(nums)
if length < 2:
return length
dp = [[0] * 2 for _ in xrange(length)]
dp[0] = [1, 1]
for i in xrange(1, length):
if nums[i] > nums[i - 1]:
dp[i][0] += dp[i - 1][1] + 1
dp[i][1] = dp[i - 1][1]
elif nums[i] < nums[i - 1]:
dp[i][1] += dp[i - 1][0] + 1
dp[i][0] = dp[i - 1][0]
else:
dp[i] = dp[i - 1]
return max(dp[-1])
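# Illustrative check (added; note xrange is Python 2 only -- use range on Python 3):
#   Solution().wiggleMaxLength([1, 7, 4, 9, 2, 5])  ->  6, the whole array wiggles
#   Solution().wiggleMaxLength([3, 3, 3])           ->  1, equal neighbours add nothing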
|
normal
|
{
"blob_id": "6c1f7b8e71760cac443a06f68f5f6ee3c2151e50",
"index": 8170,
"step-1": "<mask token>\n",
"step-2": "class Solution(object):\n <mask token>\n",
"step-3": "class Solution(object):\n\n def wiggleMaxLength(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: int\n \"\"\"\n length = len(nums)\n if length < 2:\n return length\n dp = [([0] * 2) for _ in xrange(length)]\n dp[0] = [1, 1]\n for i in xrange(1, length):\n if nums[i] > nums[i - 1]:\n dp[i][0] += dp[i - 1][1] + 1\n dp[i][1] = dp[i - 1][1]\n elif nums[i] < nums[i - 1]:\n dp[i][1] += dp[i - 1][0] + 1\n dp[i][0] = dp[i - 1][0]\n else:\n dp[i] = dp[i - 1]\n return max(dp[-1])\n",
"step-4": "# https://leetcode.com/problems/wiggle-subsequence/\n#\n# algorithms\n# Medium (36.9%)\n# Total Accepted: 43,722\n# Total Submissions: 118,490\n# beats 100.0% of python submissions\n\n\nclass Solution(object):\n def wiggleMaxLength(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: int\n \"\"\"\n length = len(nums)\n\n if length < 2:\n return length\n\n dp = [[0] * 2 for _ in xrange(length)]\n dp[0] = [1, 1]\n\n for i in xrange(1, length):\n if nums[i] > nums[i - 1]:\n dp[i][0] += dp[i - 1][1] + 1\n dp[i][1] = dp[i - 1][1]\n elif nums[i] < nums[i - 1]:\n dp[i][1] += dp[i - 1][0] + 1\n dp[i][0] = dp[i - 1][0]\n else:\n dp[i] = dp[i - 1]\n\n return max(dp[-1])\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
BASE_DIR = os.path.dirname(__file__)
SECRET_KEY = '6@j!6%foulnrume$wc7i5cwc2ppf6hcxoa&xh_vtanfy_rc@yc'
DEBUG = True
EXCEPTION_INGORE_AJAX = True
TEMPLATE_DEBUG = True
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
RESOURCE_ROOT_PATH = os.path.join(BASE_DIR, 'templates').replace('\\', '/')
STATIC_ROOT_PATH = os.path.join(BASE_DIR, 'static').replace('\\', '/')
ALLOWED_HOSTS = []
SITE_ID = 1
AUTH_USER_MODEL = 'member.User'
INSTALLED_APPS = ('apps.member', 'django.contrib.admin',
'libs.djex.autodocs', 'django.contrib.auth',
'django.contrib.contenttypes', 'django.contrib.sessions',
'django.contrib.messages', 'django.contrib.staticfiles',
'django.contrib.sites', 'django.contrib.flatpages', 'hehotel',
'apps.room', 'apps.order', 'apps.article')
MIDDLEWARE_CLASSES = (
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware')
ROOT_URLCONF = 'urls'
WSGI_APPLICATION = 'wsgi.application'
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': os
.path.join(os.path.join(BASE_DIR, 'data'), 'db.sqlite3')}}
LANGUAGE_CODE = 'zh_CN'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/__static__/'
SHOW_SQL = True
LOGGING = {'version': 1, 'disable_existing_loggers': False, 'handlers': {
'file': {'level': 'INFO', 'class': 'logging.FileHandler', 'filename':
os.path.join(os.path.dirname(__file__), 'logs/auto.log'), 'mode': 'a'}},
'loggers': {'log': {'handlers': ['file'], 'level': 'INFO', 'propagate':
True}}}
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import os
BASE_DIR = os.path.dirname(__file__)
SECRET_KEY = '6@j!6%foulnrume$wc7i5cwc2ppf6hcxoa&xh_vtanfy_rc@yc'
DEBUG = True
EXCEPTION_INGORE_AJAX = True
TEMPLATE_DEBUG = True
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
RESOURCE_ROOT_PATH = os.path.join(BASE_DIR, 'templates').replace('\\', '/')
STATIC_ROOT_PATH = os.path.join(BASE_DIR, 'static').replace('\\', '/')
ALLOWED_HOSTS = []
SITE_ID = 1
AUTH_USER_MODEL = 'member.User'
INSTALLED_APPS = ('apps.member', 'django.contrib.admin',
'libs.djex.autodocs', 'django.contrib.auth',
'django.contrib.contenttypes', 'django.contrib.sessions',
'django.contrib.messages', 'django.contrib.staticfiles',
'django.contrib.sites', 'django.contrib.flatpages', 'hehotel',
'apps.room', 'apps.order', 'apps.article')
MIDDLEWARE_CLASSES = (
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware')
ROOT_URLCONF = 'urls'
WSGI_APPLICATION = 'wsgi.application'
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': os
.path.join(os.path.join(BASE_DIR, 'data'), 'db.sqlite3')}}
LANGUAGE_CODE = 'zh_CN'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/__static__/'
SHOW_SQL = True
LOGGING = {'version': 1, 'disable_existing_loggers': False, 'handlers': {
'file': {'level': 'INFO', 'class': 'logging.FileHandler', 'filename':
os.path.join(os.path.dirname(__file__), 'logs/auto.log'), 'mode': 'a'}},
'loggers': {'log': {'handlers': ['file'], 'level': 'INFO', 'propagate':
True}}}
<|reserved_special_token_1|>
#-*- coding:utf-8 -*-
"""
Django settings for hehotel project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(__file__)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '6@j!6%foulnrume$wc7i5cwc2ppf6hcxoa&xh_vtanfy_rc@yc'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True  # switch to False once development is finished
EXCEPTION_INGORE_AJAX = True  # return exception details as an HTML page even for ajax requests
TEMPLATE_DEBUG = True
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
RESOURCE_ROOT_PATH = os.path.join(BASE_DIR, 'templates').replace('\\','/')
#STATIC_ROOT_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates\static').replace('\\','/')
STATIC_ROOT_PATH = os.path.join(BASE_DIR, 'static').replace('\\','/')
ALLOWED_HOSTS = []
# Application definition
SITE_ID = 1
AUTH_USER_MODEL = 'member.User'
INSTALLED_APPS = (
'apps.member',
'django.contrib.admin',
'libs.djex.autodocs',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.flatpages',
'hehotel',
'apps.room',
'apps.order',
'apps.article',
)
MIDDLEWARE_CLASSES = (
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
#'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'urls'
WSGI_APPLICATION = 'wsgi.application'
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(os.path.join(BASE_DIR, 'data'), 'db.sqlite3'),
},
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'zh_CN'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/__static__/'
SHOW_SQL = True  # whether to print SQL statements to the console
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
#'formatter': 'simple',
'filename': os.path.join(os.path.dirname(__file__), 'logs/auto.log'),
'mode': 'a',
}
},
'loggers': {
'log':{
'handlers': ['file'],
#'filters': ['special'],
'level': 'INFO',
'propagate': True
}
}
}
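# Illustrative use of the 'log' logger configured above (added note; assumes
# the logs/ directory exists next to this settings module):
#   import logging
#   logging.getLogger('log').info('message written to logs/auto.log')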
|
flexible
|
{
"blob_id": "045ad27f46c2090ed39a49144c3aa17093b0d9c7",
"index": 7094,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nBASE_DIR = os.path.dirname(__file__)\nSECRET_KEY = '6@j!6%foulnrume$wc7i5cwc2ppf6hcxoa&xh_vtanfy_rc@yc'\nDEBUG = True\nEXCEPTION_INGORE_AJAX = True\nTEMPLATE_DEBUG = True\nTEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]\nRESOURCE_ROOT_PATH = os.path.join(BASE_DIR, 'templates').replace('\\\\', '/')\nSTATIC_ROOT_PATH = os.path.join(BASE_DIR, 'static').replace('\\\\', '/')\nALLOWED_HOSTS = []\nSITE_ID = 1\nAUTH_USER_MODEL = 'member.User'\nINSTALLED_APPS = ('apps.member', 'django.contrib.admin',\n 'libs.djex.autodocs', 'django.contrib.auth',\n 'django.contrib.contenttypes', 'django.contrib.sessions',\n 'django.contrib.messages', 'django.contrib.staticfiles',\n 'django.contrib.sites', 'django.contrib.flatpages', 'hehotel',\n 'apps.room', 'apps.order', 'apps.article')\nMIDDLEWARE_CLASSES = (\n 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware')\nROOT_URLCONF = 'urls'\nWSGI_APPLICATION = 'wsgi.application'\nSESSION_EXPIRE_AT_BROWSER_CLOSE = False\nDATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': os\n .path.join(os.path.join(BASE_DIR, 'data'), 'db.sqlite3')}}\nLANGUAGE_CODE = 'zh_CN'\nTIME_ZONE = 'Asia/Shanghai'\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\nSTATIC_URL = '/__static__/'\nSHOW_SQL = True\nLOGGING = {'version': 1, 'disable_existing_loggers': False, 'handlers': {\n 'file': {'level': 'INFO', 'class': 'logging.FileHandler', 'filename':\n os.path.join(os.path.dirname(__file__), 'logs/auto.log'), 'mode': 'a'}},\n 'loggers': {'log': {'handlers': ['file'], 'level': 'INFO', 'propagate':\n True}}}\n",
"step-3": "<mask token>\nimport os\nBASE_DIR = os.path.dirname(__file__)\nSECRET_KEY = '6@j!6%foulnrume$wc7i5cwc2ppf6hcxoa&xh_vtanfy_rc@yc'\nDEBUG = True\nEXCEPTION_INGORE_AJAX = True\nTEMPLATE_DEBUG = True\nTEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]\nRESOURCE_ROOT_PATH = os.path.join(BASE_DIR, 'templates').replace('\\\\', '/')\nSTATIC_ROOT_PATH = os.path.join(BASE_DIR, 'static').replace('\\\\', '/')\nALLOWED_HOSTS = []\nSITE_ID = 1\nAUTH_USER_MODEL = 'member.User'\nINSTALLED_APPS = ('apps.member', 'django.contrib.admin',\n 'libs.djex.autodocs', 'django.contrib.auth',\n 'django.contrib.contenttypes', 'django.contrib.sessions',\n 'django.contrib.messages', 'django.contrib.staticfiles',\n 'django.contrib.sites', 'django.contrib.flatpages', 'hehotel',\n 'apps.room', 'apps.order', 'apps.article')\nMIDDLEWARE_CLASSES = (\n 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware')\nROOT_URLCONF = 'urls'\nWSGI_APPLICATION = 'wsgi.application'\nSESSION_EXPIRE_AT_BROWSER_CLOSE = False\nDATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': os\n .path.join(os.path.join(BASE_DIR, 'data'), 'db.sqlite3')}}\nLANGUAGE_CODE = 'zh_CN'\nTIME_ZONE = 'Asia/Shanghai'\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\nSTATIC_URL = '/__static__/'\nSHOW_SQL = True\nLOGGING = {'version': 1, 'disable_existing_loggers': False, 'handlers': {\n 'file': {'level': 'INFO', 'class': 'logging.FileHandler', 'filename':\n os.path.join(os.path.dirname(__file__), 'logs/auto.log'), 'mode': 'a'}},\n 'loggers': {'log': {'handlers': ['file'], 'level': 'INFO', 'propagate':\n True}}}\n",
"step-4": "#-*- coding:utf-8 -*-\n\"\"\"\nDjango settings for hehotel project.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.7/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/1.7/ref/settings/\n\"\"\"\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nimport os\nBASE_DIR = os.path.dirname(__file__)\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = '6@j!6%foulnrume$wc7i5cwc2ppf6hcxoa&xh_vtanfy_rc@yc'\n\n# SECURITY WARNING: don't run with debug turned on in production!\nDEBUG = True #结束开发状态应该切为False\nEXCEPTION_INGORE_AJAX = True #异常信息即便是ajax请求也直接返回Html页面\n\nTEMPLATE_DEBUG = True\nTEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]\nRESOURCE_ROOT_PATH = os.path.join(BASE_DIR, 'templates').replace('\\\\','/')\n#STATIC_ROOT_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates\\static').replace('\\\\','/')\nSTATIC_ROOT_PATH = os.path.join(BASE_DIR, 'static').replace('\\\\','/')\n\nALLOWED_HOSTS = []\n\n# Application definition\nSITE_ID = 1\n\nAUTH_USER_MODEL = 'member.User'\n\nINSTALLED_APPS = (\n 'apps.member',\n 'django.contrib.admin',\n 'libs.djex.autodocs',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'django.contrib.sites',\n 'django.contrib.flatpages',\n 'hehotel',\n 'apps.room',\n 'apps.order',\n 'apps.article',\n)\n\nMIDDLEWARE_CLASSES = (\n 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n #'django.contrib.auth.middleware.SessionAuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n)\n\nROOT_URLCONF = 'urls'\n\nWSGI_APPLICATION = 'wsgi.application'\nSESSION_EXPIRE_AT_BROWSER_CLOSE = False\n\n\n# Database\n# https://docs.djangoproject.com/en/1.7/ref/settings/#databases\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.sqlite3',\n 'NAME': os.path.join(os.path.join(BASE_DIR, 'data'), 'db.sqlite3'),\n },\n}\n\n# Internationalization\n# https://docs.djangoproject.com/en/1.7/topics/i18n/\n\nLANGUAGE_CODE = 'zh_CN'\n\nTIME_ZONE = 'Asia/Shanghai'\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/1.7/howto/static-files/\n\nSTATIC_URL = '/__static__/'\n\nSHOW_SQL = True#是否在console窗口显示sql语句\nLOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'handlers': {\n 'file': {\n 'level': 'INFO',\n 'class': 'logging.FileHandler',\n #'formatter': 'simple',\n 'filename': os.path.join(os.path.dirname(__file__), 'logs/auto.log'),\n 'mode': 'a',\n }\n },\n 'loggers': {\n 'log':{\n 'handlers': ['file'],\n #'filters': ['special'],\n 'level': 'INFO',\n 'propagate': True\n } \n }\n}\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import pygame
from settings import *
import random
class Cell:
def __init__(self, game, x, y, bombs):
self.game = game
self.x = x
self.y = y
self.i = x // TILESIZE
self.j = y // TILESIZE
self.revelada = False
self.bomba = False
self.bombas_total = bombs
self.bombs_around = 0
self.flag_enabled = False
def reveal(self):
if not self.game.is_game_over:
self.revelada = True
if self.bombs_around == 0:
self.flood()
if self.bomba:
self.game.is_game_over = True
self.game.score = 0
EFFECT.play()
def check_neighbours(self, grid):
"""
        This function will count how many bombs there are around a particular cell
"""
if self.bomba:
self.bombs_around = -1
return
total = 0
for x in range(-1, 2):
for y in range(-1, 2):
i = self.i + x
j = self.j + y
if i > -1 and i < len(grid) and j > -1 and j < len(grid[1]):
neighbor = grid[i][j]
if neighbor.bomba:
total += 1
self.bombs_around = total
def flood(self):
for x in range(-1, 2):
for y in range(-1, 2):
i = self.i + x
j = self.j + y
if i > -1 and i < len(self.game.grid) and j > -1 and j < len(self.game.grid[1]):
neighbor = self.game.grid[i][j]
if not neighbor.revelada and not neighbor.flag_enabled and not self.game.is_game_over:
neighbor.reveal()
def enable_flag(self):
self.flag_enabled = not self.flag_enabled
if self.bomba: # TODO: and self.flag_enabled
self.game.score += 1
# TODO: else: self.game.score -= 1
# all the spots revealed shouldn't be a bomb
def draw_number(self):
"""
        This function will draw the number according to the total of bombs around the cell.
        It will also give some of the numbers their own colors
"""
text_color = (0, 0, 0)
if self.bombs_around == 1:
text_color = (0, 0, 150)
if self.bombs_around == 2:
text_color = (0, 150, 0)
if self.bombs_around == 3:
text_color = (150, 0, 0)
if self.bombs_around == 4:
text_color = (133, 39, 138)
if self.bombs_around == 5:
text_color = (128, 0, 0)
if self.bombs_around == 6:
text_color = (175, 238, 238)
if self.bombs_around == 7:
text_color = (0, 0, 0)
if self.bombs_around == 8:
text_color = (33, 161, 166)
font = pygame.font.Font("fonts/JetBrainsMono-Bold.ttf", 24)
if self.bombs_around > 0 and self.revelada:
text = font.render(
str(self.bombs_around), False, text_color)
self.game.screen.blit(text, (self.x + 12, self.y))
def set_bomb(self):
"""
This function will turn this cell into a cell with a bomb
(just to keep organized)
"""
self.bomba = True
def draw_cell(self):
pygame.draw.rect(
self.game.screen, WHITE, (self.x, self.y, TILESIZE - 1, TILESIZE - 1))
if self.revelada:
if self.bomba:
pygame.draw.rect(
self.game.screen, RED, (self.x + 10, self.y + 10, TILESIZE - 23, TILESIZE - 23))
else:
pygame.draw.rect(
self.game.screen, GRAY, (self.x, self.y, TILESIZE - 1, TILESIZE - 1))
if self.flag_enabled and not self.revelada:
self.game.flag.draw(self.game.screen, self.x + 10, self.y + 10)
def get_mouse_pos(self):
mouse = pygame.mouse.get_pos()
return [mouse[0] // TILESIZE, mouse[1] // TILESIZE]
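# Illustrative grid setup (added sketch; ROWS/COLS and the `game` object are
# hypothetical -- only TILESIZE comes from settings.py):
#   grid = [[Cell(game, i * TILESIZE, j * TILESIZE, bombs) for j in range(COLS)]
#           for i in range(ROWS)]
#   for row in grid:
#       for cell in row:
#           cell.check_neighbours(grid)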
|
normal
|
{
"blob_id": "e31f1e24c319f338d728661dfd50e758526112d6",
"index": 7796,
"step-1": "<mask token>\n\n\nclass Cell:\n <mask token>\n\n def reveal(self):\n if not self.game.is_game_over:\n self.revelada = True\n if self.bombs_around == 0:\n self.flood()\n if self.bomba:\n self.game.is_game_over = True\n self.game.score = 0\n EFFECT.play()\n\n def check_neighbours(self, grid):\n \"\"\"\n This function will count how many bombs there is around a particular cell\n \"\"\"\n if self.bomba:\n self.bombs_around = -1\n return\n total = 0\n for x in range(-1, 2):\n for y in range(-1, 2):\n i = self.i + x\n j = self.j + y\n if i > -1 and i < len(grid) and j > -1 and j < len(grid[1]):\n neighbor = grid[i][j]\n if neighbor.bomba:\n total += 1\n self.bombs_around = total\n <mask token>\n <mask token>\n\n def draw_number(self):\n \"\"\"\n This function will draw the numbers according to the total of bombs around the cell.\n Also it will give colors to some numbers\n \"\"\"\n text_color = 0, 0, 0\n if self.bombs_around == 1:\n text_color = 0, 0, 150\n if self.bombs_around == 2:\n text_color = 0, 150, 0\n if self.bombs_around == 3:\n text_color = 150, 0, 0\n if self.bombs_around == 4:\n text_color = 133, 39, 138\n if self.bombs_around == 5:\n text_color = 128, 0, 0\n if self.bombs_around == 6:\n text_color = 175, 238, 238\n if self.bombs_around == 7:\n text_color = 0, 0, 0\n if self.bombs_around == 8:\n text_color = 33, 161, 166\n font = pygame.font.Font('fonts/JetBrainsMono-Bold.ttf', 24)\n if self.bombs_around > 0 and self.revelada:\n text = font.render(str(self.bombs_around), False, text_color)\n self.game.screen.blit(text, (self.x + 12, self.y))\n <mask token>\n\n def draw_cell(self):\n pygame.draw.rect(self.game.screen, WHITE, (self.x, self.y, TILESIZE -\n 1, TILESIZE - 1))\n if self.revelada:\n if self.bomba:\n pygame.draw.rect(self.game.screen, RED, (self.x + 10, self.\n y + 10, TILESIZE - 23, TILESIZE - 23))\n else:\n pygame.draw.rect(self.game.screen, GRAY, (self.x, self.y, \n TILESIZE - 1, TILESIZE - 1))\n if self.flag_enabled and not self.revelada:\n self.game.flag.draw(self.game.screen, self.x + 10, self.y + 10)\n\n def get_mouse_pos(self):\n mouse = pygame.mouse.get_pos()\n return [mouse[0] // TILESIZE, mouse[1] // TILESIZE]\n",
"step-2": "<mask token>\n\n\nclass Cell:\n\n def __init__(self, game, x, y, bombs):\n self.game = game\n self.x = x\n self.y = y\n self.i = x // TILESIZE\n self.j = y // TILESIZE\n self.revelada = False\n self.bomba = False\n self.bombas_total = bombs\n self.bombs_around = 0\n self.flag_enabled = False\n\n def reveal(self):\n if not self.game.is_game_over:\n self.revelada = True\n if self.bombs_around == 0:\n self.flood()\n if self.bomba:\n self.game.is_game_over = True\n self.game.score = 0\n EFFECT.play()\n\n def check_neighbours(self, grid):\n \"\"\"\n This function will count how many bombs there is around a particular cell\n \"\"\"\n if self.bomba:\n self.bombs_around = -1\n return\n total = 0\n for x in range(-1, 2):\n for y in range(-1, 2):\n i = self.i + x\n j = self.j + y\n if i > -1 and i < len(grid) and j > -1 and j < len(grid[1]):\n neighbor = grid[i][j]\n if neighbor.bomba:\n total += 1\n self.bombs_around = total\n\n def flood(self):\n for x in range(-1, 2):\n for y in range(-1, 2):\n i = self.i + x\n j = self.j + y\n if i > -1 and i < len(self.game.grid) and j > -1 and j < len(\n self.game.grid[1]):\n neighbor = self.game.grid[i][j]\n if (not neighbor.revelada and not neighbor.flag_enabled and\n not self.game.is_game_over):\n neighbor.reveal()\n\n def enable_flag(self):\n self.flag_enabled = not self.flag_enabled\n if self.bomba:\n self.game.score += 1\n\n def draw_number(self):\n \"\"\"\n This function will draw the numbers according to the total of bombs around the cell.\n Also it will give colors to some numbers\n \"\"\"\n text_color = 0, 0, 0\n if self.bombs_around == 1:\n text_color = 0, 0, 150\n if self.bombs_around == 2:\n text_color = 0, 150, 0\n if self.bombs_around == 3:\n text_color = 150, 0, 0\n if self.bombs_around == 4:\n text_color = 133, 39, 138\n if self.bombs_around == 5:\n text_color = 128, 0, 0\n if self.bombs_around == 6:\n text_color = 175, 238, 238\n if self.bombs_around == 7:\n text_color = 0, 0, 0\n if self.bombs_around == 8:\n text_color = 33, 161, 166\n font = pygame.font.Font('fonts/JetBrainsMono-Bold.ttf', 24)\n if self.bombs_around > 0 and self.revelada:\n text = font.render(str(self.bombs_around), False, text_color)\n self.game.screen.blit(text, (self.x + 12, self.y))\n <mask token>\n\n def draw_cell(self):\n pygame.draw.rect(self.game.screen, WHITE, (self.x, self.y, TILESIZE -\n 1, TILESIZE - 1))\n if self.revelada:\n if self.bomba:\n pygame.draw.rect(self.game.screen, RED, (self.x + 10, self.\n y + 10, TILESIZE - 23, TILESIZE - 23))\n else:\n pygame.draw.rect(self.game.screen, GRAY, (self.x, self.y, \n TILESIZE - 1, TILESIZE - 1))\n if self.flag_enabled and not self.revelada:\n self.game.flag.draw(self.game.screen, self.x + 10, self.y + 10)\n\n def get_mouse_pos(self):\n mouse = pygame.mouse.get_pos()\n return [mouse[0] // TILESIZE, mouse[1] // TILESIZE]\n",
"step-3": "<mask token>\n\n\nclass Cell:\n\n def __init__(self, game, x, y, bombs):\n self.game = game\n self.x = x\n self.y = y\n self.i = x // TILESIZE\n self.j = y // TILESIZE\n self.revelada = False\n self.bomba = False\n self.bombas_total = bombs\n self.bombs_around = 0\n self.flag_enabled = False\n\n def reveal(self):\n if not self.game.is_game_over:\n self.revelada = True\n if self.bombs_around == 0:\n self.flood()\n if self.bomba:\n self.game.is_game_over = True\n self.game.score = 0\n EFFECT.play()\n\n def check_neighbours(self, grid):\n \"\"\"\n This function will count how many bombs there is around a particular cell\n \"\"\"\n if self.bomba:\n self.bombs_around = -1\n return\n total = 0\n for x in range(-1, 2):\n for y in range(-1, 2):\n i = self.i + x\n j = self.j + y\n if i > -1 and i < len(grid) and j > -1 and j < len(grid[1]):\n neighbor = grid[i][j]\n if neighbor.bomba:\n total += 1\n self.bombs_around = total\n\n def flood(self):\n for x in range(-1, 2):\n for y in range(-1, 2):\n i = self.i + x\n j = self.j + y\n if i > -1 and i < len(self.game.grid) and j > -1 and j < len(\n self.game.grid[1]):\n neighbor = self.game.grid[i][j]\n if (not neighbor.revelada and not neighbor.flag_enabled and\n not self.game.is_game_over):\n neighbor.reveal()\n\n def enable_flag(self):\n self.flag_enabled = not self.flag_enabled\n if self.bomba:\n self.game.score += 1\n\n def draw_number(self):\n \"\"\"\n This function will draw the numbers according to the total of bombs around the cell.\n Also it will give colors to some numbers\n \"\"\"\n text_color = 0, 0, 0\n if self.bombs_around == 1:\n text_color = 0, 0, 150\n if self.bombs_around == 2:\n text_color = 0, 150, 0\n if self.bombs_around == 3:\n text_color = 150, 0, 0\n if self.bombs_around == 4:\n text_color = 133, 39, 138\n if self.bombs_around == 5:\n text_color = 128, 0, 0\n if self.bombs_around == 6:\n text_color = 175, 238, 238\n if self.bombs_around == 7:\n text_color = 0, 0, 0\n if self.bombs_around == 8:\n text_color = 33, 161, 166\n font = pygame.font.Font('fonts/JetBrainsMono-Bold.ttf', 24)\n if self.bombs_around > 0 and self.revelada:\n text = font.render(str(self.bombs_around), False, text_color)\n self.game.screen.blit(text, (self.x + 12, self.y))\n\n def set_bomb(self):\n \"\"\"\n This function will turn this cell into a cell with a bomb \n (just to keep organized)\n \"\"\"\n self.bomba = True\n\n def draw_cell(self):\n pygame.draw.rect(self.game.screen, WHITE, (self.x, self.y, TILESIZE -\n 1, TILESIZE - 1))\n if self.revelada:\n if self.bomba:\n pygame.draw.rect(self.game.screen, RED, (self.x + 10, self.\n y + 10, TILESIZE - 23, TILESIZE - 23))\n else:\n pygame.draw.rect(self.game.screen, GRAY, (self.x, self.y, \n TILESIZE - 1, TILESIZE - 1))\n if self.flag_enabled and not self.revelada:\n self.game.flag.draw(self.game.screen, self.x + 10, self.y + 10)\n\n def get_mouse_pos(self):\n mouse = pygame.mouse.get_pos()\n return [mouse[0] // TILESIZE, mouse[1] // TILESIZE]\n",
"step-4": "import pygame\nfrom settings import *\nimport random\n\n\nclass Cell:\n\n def __init__(self, game, x, y, bombs):\n self.game = game\n self.x = x\n self.y = y\n self.i = x // TILESIZE\n self.j = y // TILESIZE\n self.revelada = False\n self.bomba = False\n self.bombas_total = bombs\n self.bombs_around = 0\n self.flag_enabled = False\n\n def reveal(self):\n if not self.game.is_game_over:\n self.revelada = True\n if self.bombs_around == 0:\n self.flood()\n if self.bomba:\n self.game.is_game_over = True\n self.game.score = 0\n EFFECT.play()\n\n def check_neighbours(self, grid):\n \"\"\"\n This function will count how many bombs there is around a particular cell\n \"\"\"\n if self.bomba:\n self.bombs_around = -1\n return\n total = 0\n for x in range(-1, 2):\n for y in range(-1, 2):\n i = self.i + x\n j = self.j + y\n if i > -1 and i < len(grid) and j > -1 and j < len(grid[1]):\n neighbor = grid[i][j]\n if neighbor.bomba:\n total += 1\n self.bombs_around = total\n\n def flood(self):\n for x in range(-1, 2):\n for y in range(-1, 2):\n i = self.i + x\n j = self.j + y\n if i > -1 and i < len(self.game.grid) and j > -1 and j < len(\n self.game.grid[1]):\n neighbor = self.game.grid[i][j]\n if (not neighbor.revelada and not neighbor.flag_enabled and\n not self.game.is_game_over):\n neighbor.reveal()\n\n def enable_flag(self):\n self.flag_enabled = not self.flag_enabled\n if self.bomba:\n self.game.score += 1\n\n def draw_number(self):\n \"\"\"\n This function will draw the numbers according to the total of bombs around the cell.\n Also it will give colors to some numbers\n \"\"\"\n text_color = 0, 0, 0\n if self.bombs_around == 1:\n text_color = 0, 0, 150\n if self.bombs_around == 2:\n text_color = 0, 150, 0\n if self.bombs_around == 3:\n text_color = 150, 0, 0\n if self.bombs_around == 4:\n text_color = 133, 39, 138\n if self.bombs_around == 5:\n text_color = 128, 0, 0\n if self.bombs_around == 6:\n text_color = 175, 238, 238\n if self.bombs_around == 7:\n text_color = 0, 0, 0\n if self.bombs_around == 8:\n text_color = 33, 161, 166\n font = pygame.font.Font('fonts/JetBrainsMono-Bold.ttf', 24)\n if self.bombs_around > 0 and self.revelada:\n text = font.render(str(self.bombs_around), False, text_color)\n self.game.screen.blit(text, (self.x + 12, self.y))\n\n def set_bomb(self):\n \"\"\"\n This function will turn this cell into a cell with a bomb \n (just to keep organized)\n \"\"\"\n self.bomba = True\n\n def draw_cell(self):\n pygame.draw.rect(self.game.screen, WHITE, (self.x, self.y, TILESIZE -\n 1, TILESIZE - 1))\n if self.revelada:\n if self.bomba:\n pygame.draw.rect(self.game.screen, RED, (self.x + 10, self.\n y + 10, TILESIZE - 23, TILESIZE - 23))\n else:\n pygame.draw.rect(self.game.screen, GRAY, (self.x, self.y, \n TILESIZE - 1, TILESIZE - 1))\n if self.flag_enabled and not self.revelada:\n self.game.flag.draw(self.game.screen, self.x + 10, self.y + 10)\n\n def get_mouse_pos(self):\n mouse = pygame.mouse.get_pos()\n return [mouse[0] // TILESIZE, mouse[1] // TILESIZE]\n",
"step-5": "import pygame\nfrom settings import *\nimport random\n\n\nclass Cell:\n def __init__(self, game, x, y, bombs):\n self.game = game\n self.x = x\n self.y = y\n self.i = x // TILESIZE\n self.j = y // TILESIZE\n self.revelada = False\n self.bomba = False\n self.bombas_total = bombs\n self.bombs_around = 0\n self.flag_enabled = False\n\n def reveal(self):\n if not self.game.is_game_over:\n self.revelada = True\n\n if self.bombs_around == 0:\n self.flood()\n if self.bomba:\n self.game.is_game_over = True\n self.game.score = 0\n EFFECT.play()\n\n def check_neighbours(self, grid):\n \"\"\"\n This function will count how many bombs there is around a particular cell\n \"\"\"\n if self.bomba:\n self.bombs_around = -1\n return\n\n total = 0\n for x in range(-1, 2):\n for y in range(-1, 2):\n i = self.i + x\n j = self.j + y\n if i > -1 and i < len(grid) and j > -1 and j < len(grid[1]):\n neighbor = grid[i][j]\n\n if neighbor.bomba:\n total += 1\n \n self.bombs_around = total\n\n def flood(self):\n for x in range(-1, 2):\n for y in range(-1, 2):\n i = self.i + x\n j = self.j + y\n if i > -1 and i < len(self.game.grid) and j > -1 and j < len(self.game.grid[1]):\n neighbor = self.game.grid[i][j]\n\n if not neighbor.revelada and not neighbor.flag_enabled and not self.game.is_game_over:\n neighbor.reveal()\n\n def enable_flag(self):\n self.flag_enabled = not self.flag_enabled\n if self.bomba: # TODO: and self.flag_enabled\n self.game.score += 1\n # TODO: else: self.game.score -= 1\n # all the spots revealed shouldn't be a bomb\n\n def draw_number(self):\n \"\"\"\n This function will draw the numbers according to the total of bombs around the cell.\n Also it will give colors to some numbers\n \"\"\"\n text_color = (0, 0, 0)\n if self.bombs_around == 1:\n text_color = (0, 0, 150)\n if self.bombs_around == 2:\n text_color = (0, 150, 0)\n if self.bombs_around == 3:\n text_color = (150, 0, 0)\n if self.bombs_around == 4:\n text_color = (133, 39, 138)\n if self.bombs_around == 5:\n text_color = (128, 0, 0)\n if self.bombs_around == 6:\n text_color = (175, 238, 238)\n if self.bombs_around == 7:\n text_color = (0, 0, 0)\n if self.bombs_around == 8:\n text_color = (33, 161, 166)\n\n font = pygame.font.Font(\"fonts/JetBrainsMono-Bold.ttf\", 24)\n if self.bombs_around > 0 and self.revelada:\n text = font.render(\n str(self.bombs_around), False, text_color)\n self.game.screen.blit(text, (self.x + 12, self.y))\n\n def set_bomb(self):\n \"\"\"\n This function will turn this cell into a cell with a bomb \n (just to keep organized)\n \"\"\"\n self.bomba = True\n\n def draw_cell(self):\n\n pygame.draw.rect(\n self.game.screen, WHITE, (self.x, self.y, TILESIZE - 1, TILESIZE - 1))\n\n if self.revelada:\n if self.bomba:\n pygame.draw.rect(\n self.game.screen, RED, (self.x + 10, self.y + 10, TILESIZE - 23, TILESIZE - 23))\n else:\n pygame.draw.rect(\n self.game.screen, GRAY, (self.x, self.y, TILESIZE - 1, TILESIZE - 1))\n if self.flag_enabled and not self.revelada:\n self.game.flag.draw(self.game.screen, self.x + 10, self.y + 10)\n\n def get_mouse_pos(self):\n mouse = pygame.mouse.get_pos()\n return [mouse[0] // TILESIZE, mouse[1] // TILESIZE]\n",
"step-ids": [
6,
9,
10,
11,
12
]
}
|
[
6,
9,
10,
11,
12
] |
from flask import Flask, request, render_template, redirect
import os
import smtplib
from email.message import EmailMessage
app = Flask(__name__)
EMAIL_ADDRESS = os.environ.get('EMAIL_USER')
EMAIL_PASSWORD = os.environ.get('EMAIL_PASS')
@app.route('/')
def index():
return render_template('index.html')
@app.route('/submit', methods=['POST'])
def submit():
if request.method == 'POST':
name = request.form['name']
email = request.form['email']
subject = request.form['subject']
message = request.form['message']
msg = EmailMessage()
msg['From'] = email
msg['To'] = EMAIL_ADDRESS
msg['Subject'] = subject
msg.set_content(message)
with smtplib.SMTP_SSL('smtp.gmail.com', 465) as smtp:
smtp.login(EMAIL_ADDRESS, EMAIL_PASSWORD)
smtp.send_message(msg)
return render_template('success.html')
return render_template('index.html')
if __name__ == '__main__':
app.run()
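# Illustrative local run (added note; assumes a Gmail account with an app
# password -- the values below are placeholders):
#   export EMAIL_USER=you@gmail.com
#   export EMAIL_PASS=your-app-password
#   python app.py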
|
normal
|
{
"blob_id": "27d9e6a868cfc18780ec9615e8dbc3b5ea2fd0c3",
"index": 1399,
"step-1": "<mask token>\n\n\[email protected]('/')\ndef index():\n return render_template('index.html')\n\n\[email protected]('/submit', methods=['POST'])\ndef submit():\n if request.method == 'POST':\n name = request.form['name']\n email = request.form['email']\n subject = request.form['subject']\n message = request.form['message']\n msg = EmailMessage()\n msg['From'] = email\n msg['To'] = EMAIL_ADDRESS\n msg['Subject'] = subject\n msg.set_content(message)\n with smtplib.SMTP_SSL('smtp.gmail.com', 465) as smtp:\n smtp.login(EMAIL_ADDRESS, EMAIL_PASSWORD)\n smtp.send_message(msg)\n return render_template('success.html')\n return render_template('index.html')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/')\ndef index():\n return render_template('index.html')\n\n\[email protected]('/submit', methods=['POST'])\ndef submit():\n if request.method == 'POST':\n name = request.form['name']\n email = request.form['email']\n subject = request.form['subject']\n message = request.form['message']\n msg = EmailMessage()\n msg['From'] = email\n msg['To'] = EMAIL_ADDRESS\n msg['Subject'] = subject\n msg.set_content(message)\n with smtplib.SMTP_SSL('smtp.gmail.com', 465) as smtp:\n smtp.login(EMAIL_ADDRESS, EMAIL_PASSWORD)\n smtp.send_message(msg)\n return render_template('success.html')\n return render_template('index.html')\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-3": "<mask token>\napp = Flask(__name__)\nEMAIL_ADDRESS = os.environ.get('EMAIL_USER')\nEMAIL_PASSWORD = os.environ.get('EMAIL_PASS')\n\n\[email protected]('/')\ndef index():\n return render_template('index.html')\n\n\[email protected]('/submit', methods=['POST'])\ndef submit():\n if request.method == 'POST':\n name = request.form['name']\n email = request.form['email']\n subject = request.form['subject']\n message = request.form['message']\n msg = EmailMessage()\n msg['From'] = email\n msg['To'] = EMAIL_ADDRESS\n msg['Subject'] = subject\n msg.set_content(message)\n with smtplib.SMTP_SSL('smtp.gmail.com', 465) as smtp:\n smtp.login(EMAIL_ADDRESS, EMAIL_PASSWORD)\n smtp.send_message(msg)\n return render_template('success.html')\n return render_template('index.html')\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-4": "from flask import Flask, request, render_template, redirect\nimport os\nimport smtplib\nfrom email.message import EmailMessage\napp = Flask(__name__)\nEMAIL_ADDRESS = os.environ.get('EMAIL_USER')\nEMAIL_PASSWORD = os.environ.get('EMAIL_PASS')\n\n\[email protected]('/')\ndef index():\n return render_template('index.html')\n\n\[email protected]('/submit', methods=['POST'])\ndef submit():\n if request.method == 'POST':\n name = request.form['name']\n email = request.form['email']\n subject = request.form['subject']\n message = request.form['message']\n msg = EmailMessage()\n msg['From'] = email\n msg['To'] = EMAIL_ADDRESS\n msg['Subject'] = subject\n msg.set_content(message)\n with smtplib.SMTP_SSL('smtp.gmail.com', 465) as smtp:\n smtp.login(EMAIL_ADDRESS, EMAIL_PASSWORD)\n smtp.send_message(msg)\n return render_template('success.html')\n return render_template('index.html')\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
y = 10
x = 'Calm and quiet'
print(f'Text: {x}')
print(f'Number: {y}')
a1 = input('Enter the first number: ')
a2 = input('Enter the second number: ')
b1 = input('Enter the first string: ')
b2 = input('Enter the second string: ')
print(f'You entered the numbers: {a1}/{a2}')
print(f'You entered the strings: {b1} / {b2}')
|
normal
|
{
"blob_id": "2fabb03f0f6b0b297245354782e650380509424b",
"index": 8054,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(f'Текст:{x}')\nprint(f'Число:{y}')\n<mask token>\nprint(f'Вы ввели числа: {a1}/{a2}')\nprint(f'Вы ввели строки: {b1} / {b2}')\n",
"step-3": "y = 10\nx = 'Тишь да гладь'\nprint(f'Текст:{x}')\nprint(f'Число:{y}')\na1 = input('Введите первое число: ')\na2 = input('Введите второе число: ')\nb1 = input('Введите первую строку: ')\nb2 = input('Введите вторую строку: ')\nprint(f'Вы ввели числа: {a1}/{a2}')\nprint(f'Вы ввели строки: {b1} / {b2}')\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def containsNearbyDuplicate(self, nums, k):
""" Time complexity: O(n). Space complexity: O(n), n is len(nums).
"""
nums_dict = dict()
for i, n in enumerate(nums):
if n in nums_dict and abs(nums_dict[n] - i) <= k:
return True
nums_dict[n] = i
return False
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def containsNearbyDuplicate(self, nums, k):
""" Time complexity: O(n). Space complexity: O(n), n is len(nums).
"""
nums_dict = dict()
for i, n in enumerate(nums):
if n in nums_dict and abs(nums_dict[n] - i) <= k:
return True
nums_dict[n] = i
return False
if __name__ == '__main__':
sol = Solution()
nums = [1, 2, 3, 4, 1, 6, 8]
k = 4
print(sol.containsNearbyDuplicate(nums, k))
<|reserved_special_token_1|>
""" Problem statement:
https://leetcode.com/problems/contains-duplicate-ii/description/
Given an array of integers and an integer k, find out whether
there are two distinct indices i and j in the array such that nums[i] = nums[j]
and the absolute difference between i and j is at most k.
"""
class Solution:
def containsNearbyDuplicate(self, nums, k):
""" Time complexity: O(n). Space complexity: O(n), n is len(nums).
"""
nums_dict = dict() # integer: most recent index
for i, n in enumerate(nums):
if n in nums_dict and abs(nums_dict[n] - i) <= k:
return True
nums_dict[n] = i # update index of integer n in dictionary
return False
if __name__ == "__main__":
sol = Solution()
nums = [1, 2, 3, 4, 1, 6, 8]
k = 4
print(sol.containsNearbyDuplicate(nums, k))
|
flexible
|
{
"blob_id": "33c241747062ab0d374082d2a8179335503fa212",
"index": 3320,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def containsNearbyDuplicate(self, nums, k):\n \"\"\" Time complexity: O(n). Space complexity: O(n), n is len(nums).\n \"\"\"\n nums_dict = dict()\n for i, n in enumerate(nums):\n if n in nums_dict and abs(nums_dict[n] - i) <= k:\n return True\n nums_dict[n] = i\n return False\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def containsNearbyDuplicate(self, nums, k):\n \"\"\" Time complexity: O(n). Space complexity: O(n), n is len(nums).\n \"\"\"\n nums_dict = dict()\n for i, n in enumerate(nums):\n if n in nums_dict and abs(nums_dict[n] - i) <= k:\n return True\n nums_dict[n] = i\n return False\n\n\nif __name__ == '__main__':\n sol = Solution()\n nums = [1, 2, 3, 4, 1, 6, 8]\n k = 4\n print(sol.containsNearbyDuplicate(nums, k))\n",
"step-5": "\"\"\" Problem statement:\nhttps://leetcode.com/problems/contains-duplicate-ii/description/\n\nGiven an array of integers and an integer k, find out whether\nthere are two distinct indices i and j in the array such that nums[i] = nums[j]\nand the absolute difference between i and j is at most k.\n\"\"\"\n\n\nclass Solution:\n def containsNearbyDuplicate(self, nums, k):\n \"\"\" Time complexity: O(n). Space complexity: O(n), n is len(nums).\n \"\"\"\n nums_dict = dict() # integer: most recent index\n for i, n in enumerate(nums):\n if n in nums_dict and abs(nums_dict[n] - i) <= k:\n return True\n nums_dict[n] = i # update index of integer n in dictionary\n return False\n\n\nif __name__ == \"__main__\":\n sol = Solution()\n nums = [1, 2, 3, 4, 1, 6, 8]\n k = 4\n print(sol.containsNearbyDuplicate(nums, k))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
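A sliding-window set is a common alternative to the dictionary approach in the record above: it bounds memory at O(min(n, k)) because the set never holds more than k elements. The sketch below is illustrative only; the function name is mine and is not part of the original solution.

def contains_nearby_duplicate_window(nums, k):
    """Sliding-window variant: the set never holds more than k elements."""
    window = set()
    for i, n in enumerate(nums):
        if n in window:
            return True
        window.add(n)
        if len(window) > k:
            window.remove(nums[i - k])  # evict the element that just left the window
    return False

# contains_nearby_duplicate_window([1, 2, 3, 4, 1, 6, 8], 4) -> True (indices 0 and 4)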
import pygame
def play_file(name, loop=0, time=0.0):
    try:  # if the audio file exists
        file = 'data/audio/' + name
        pygame.mixer.music.load(file)
        pygame.mixer.music.play(loop, time)
    except pygame.error:  # pygame raises pygame.error when a file fails to load
        print('AudioLoading: failed to load ' + name)
        try:
            file = 'data/audio/error.aud'
            pygame.mixer.music.load(file)
            pygame.mixer.music.play(loop, time)
        except pygame.error:
            print('Can not load file: ' + name)
            raise SystemExit()
|
normal
|
{
"blob_id": "98940c898d58917e652fe1514ea758768b048dbc",
"index": 9601,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef play_file(name, loop=0, time=0.0):\n try:\n file = 'data/audio/' + name\n pygame.mixer.music.load(file)\n pygame.mixer.music.play(loop, time)\n except ZeroDivisionError:\n print('AudioLoading: failed to load ' + name)\n try:\n file = 'data/audio/error.aud'\n pygame.mixer.music.load(file)\n pygame.mixer.music.play(loop, time)\n except ZeroDivisionError:\n print('Can not load file: ' + name)\n raise SystemExit()\n",
"step-3": "import pygame\n\n\ndef play_file(name, loop=0, time=0.0):\n try:\n file = 'data/audio/' + name\n pygame.mixer.music.load(file)\n pygame.mixer.music.play(loop, time)\n except ZeroDivisionError:\n print('AudioLoading: failed to load ' + name)\n try:\n file = 'data/audio/error.aud'\n pygame.mixer.music.load(file)\n pygame.mixer.music.play(loop, time)\n except ZeroDivisionError:\n print('Can not load file: ' + name)\n raise SystemExit()\n",
"step-4": "import pygame\n\n\ndef play_file(name,loop=0,time=0.0):\n try: #if image exists\n file='data/audio/'+name\n pygame.mixer.music.load(file)\n pygame.mixer.music.play(loop, time)\n except ZeroDivisionError: #if image doesn't exist\n print('AudioLoading: failed to load ' + name)\n try:\n file = 'data/audio/error.aud'\n pygame.mixer.music.load(file)\n pygame.mixer.music.play(loop, time)\n except ZeroDivisionError:\n print( 'Can not load file: '+name)\n raise SystemExit()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
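A minimal usage sketch for the play_file helper above. It assumes pygame is installed and that a file such as data/audio/theme.ogg exists (the file name is illustrative); the mixer must be initialised before pygame.mixer.music can load anything, and load failures surface as pygame.error rather than ZeroDivisionError.

import pygame

pygame.init()
pygame.mixer.init()        # the mixer must be initialised before pygame.mixer.music is used
play_file('theme.ogg')     # plays data/audio/theme.ogg once from the beginning
pygame.time.wait(3000)     # playback is non-blocking, so keep the process alive briefly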
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
urlpatterns = [url('^$', views.showberanda, name='showberanda'), url(
'^sentimenanalisis/$', views.showsentimenanalisis, name=
'showsentimenanalisis'), url('^bantuan/$', views.showbantuan, name=
'showbantuan'), url('^tweets/', views.get_tweets)]
<|reserved_special_token_1|>
from django.conf.urls import url
from . import views
from . import admin
urlpatterns = [url('^$', views.showberanda, name='showberanda'), url(
'^sentimenanalisis/$', views.showsentimenanalisis, name=
'showsentimenanalisis'), url('^bantuan/$', views.showbantuan, name=
'showbantuan'), url('^tweets/', views.get_tweets)]
<|reserved_special_token_1|>
from django.conf.urls import url
from . import views
from .import admin
urlpatterns = [
url(r'^$', views.showberanda, name='showberanda'),
url(r'^sentimenanalisis/$', views.showsentimenanalisis, name='showsentimenanalisis'),
url(r'^bantuan/$', views.showbantuan, name='showbantuan'),
url(r'^tweets/', views.get_tweets),
]
|
flexible
|
{
"blob_id": "077c596f71aae22e85589fdaf78d5cdae8085443",
"index": 8710,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('^$', views.showberanda, name='showberanda'), url(\n '^sentimenanalisis/$', views.showsentimenanalisis, name=\n 'showsentimenanalisis'), url('^bantuan/$', views.showbantuan, name=\n 'showbantuan'), url('^tweets/', views.get_tweets)]\n",
"step-3": "from django.conf.urls import url\nfrom . import views\nfrom . import admin\nurlpatterns = [url('^$', views.showberanda, name='showberanda'), url(\n '^sentimenanalisis/$', views.showsentimenanalisis, name=\n 'showsentimenanalisis'), url('^bantuan/$', views.showbantuan, name=\n 'showbantuan'), url('^tweets/', views.get_tweets)]\n",
"step-4": "from django.conf.urls import url\nfrom . import views\nfrom .import admin\n\nurlpatterns = [\n url(r'^$', views.showberanda, name='showberanda'),\n url(r'^sentimenanalisis/$', views.showsentimenanalisis, name='showsentimenanalisis'),\n url(r'^bantuan/$', views.showbantuan, name='showbantuan'),\n url(r'^tweets/', views.get_tweets),\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
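The route table above uses the regex-based url() helper from django.conf.urls, which Django 2.0+ deprecates in favour of django.urls.path and re_path. A hedged equivalent, assuming the same views module, might look like this:

from django.urls import path, re_path
from . import views

urlpatterns = [
    path('', views.showberanda, name='showberanda'),
    path('sentimenanalisis/', views.showsentimenanalisis, name='showsentimenanalisis'),
    path('bantuan/', views.showbantuan, name='showbantuan'),
    re_path(r'^tweets/', views.get_tweets),  # kept as a regex: the original pattern has no trailing $
]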
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import warnings
from functools import wraps
import re
import logging
import pandas as pd
import requests
def return_df(field="data"):
"""return DataFrame data"""
def decorator(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
resp = func(self, *args, **kwargs)
if resp.get("code") == 200 and self.return_df is True:
df = pd.DataFrame(resp["resp"][field])
if "date" in df.columns:
df['date'] = df['date'].apply(lambda x: datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S"))
df = df.set_index("date")
return df
return resp
return wrapper
return decorator
class RQOpenClient(object):
def __init__(self, username, password, logger=None, log_level=logging.DEBUG,
base_url="https://rqopen.ricequant.com", timeout=(5, 10), return_df=True):
"""
        :param username: login account
        :param password: password
        :param logger: logger
        :param log_level: log level
        :param base_url: service address; defaults to the web endpoint, RQPro 2.0 needs to be configured separately
        :param timeout: timeout
        :param return_df: whether returned data is a DataFrame; False returns a dict
"""
self.base_url = base_url
# tel number need "+86"
if re.match(r'^[1]([3-9])[0-9]{9}$', username):
username = "+86" + username
self.username = username
self.password = password
self.client = requests.Session()
self.logger = logger if logger else logging.getLogger("RQOpenClient")
self.logger.setLevel(log_level)
self.timeout = timeout
self.return_df = return_df
def login(self):
self.logger.info("Try login. Username {}".format(self.username))
resp = self.client.post("{}/login".format(self.base_url),
{"username": self.username, "password": self.password}, timeout=self.timeout)
ret = resp.json()
self.logger.info("Login response {}".format(ret))
return ret
def _do(self, func, *args, **kwargs):
resp = func(*args, **kwargs)
if resp["code"] == 401:
login_resp = self.login()
if login_resp["code"] == 200:
self.logger.info("login success")
else:
return login_resp
elif resp["code"] == 200:
return resp
resp = func(*args, **kwargs)
return resp
def get_day_trades(self, run_id):
warnings.warn("get_day_trades will be abandoned, please use current_trades", DeprecationWarning)
return self._do(self._get_day_trades, run_id)
def get_positions(self, run_id):
        warnings.warn("get_positions will be abandoned, please use current_positions", DeprecationWarning)
return self._do(self._get_positions, run_id)
def _get_day_trades(self, run_id):
resp = self.client.get("{}/pt/load_day_trades/{}".format(self.base_url, run_id), timeout=self.timeout)
return resp.json()
def _get_positions(self, run_id):
resp = self.client.get("{}/pt/load_current_positions/{}".format(self.base_url, run_id), timeout=self.timeout)
return resp.json()
# base
@return_df()
def trades(self, run_id):
"""get all trades"""
return self._do(self._get_base, "trades", run_id)
@return_df()
def positions(self, run_id):
"""get all positions (market_value)"""
return self._do(self._get_base, "positions", run_id)
@return_df()
def portfolio(self, run_id):
"""get all portfolio"""
return self._do(self._get_base, "portfolio", run_id)
@return_df("positions")
def current_positions(self, run_id):
"""get current positions"""
return self._do(self._get_base, "pt/load_current_positions", run_id)
@return_df("trades")
    def current_trades(self, run_id):
        """get current trades"""
return self._do(self._get_base, "pt/load_day_trades", run_id)
def _get_base(self, api_path, run_id):
resp = self.client.get("{}/{}/{}".format(self.base_url, api_path, run_id), timeout=self.timeout)
return resp.json()
|
normal
|
{
"blob_id": "bd2edd5139a9c5050c582a54cdacca2b0739f333",
"index": 9151,
"step-1": "<mask token>\n\n\nclass RQOpenClient(object):\n\n def __init__(self, username, password, logger=None, log_level=logging.\n DEBUG, base_url='https://rqopen.ricequant.com', timeout=(5, 10),\n return_df=True):\n \"\"\"\n :param username: 登录账户\n :param password: 密码\n :param logger: 日志\n :param log_level: 日志级别\n :param base_url: 服务地址,默认web端 rqpro2.0需要单独配置\n :param timeout: 超时时间\n :param return_df: 返回数据是否为DataFrame False返回dict\n \"\"\"\n self.base_url = base_url\n if re.match('^[1]([3-9])[0-9]{9}$', username):\n username = '+86' + username\n self.username = username\n self.password = password\n self.client = requests.Session()\n self.logger = logger if logger else logging.getLogger('RQOpenClient')\n self.logger.setLevel(log_level)\n self.timeout = timeout\n self.return_df = return_df\n <mask token>\n\n def _do(self, func, *args, **kwargs):\n resp = func(*args, **kwargs)\n if resp['code'] == 401:\n login_resp = self.login()\n if login_resp['code'] == 200:\n self.logger.info('login success')\n else:\n return login_resp\n elif resp['code'] == 200:\n return resp\n resp = func(*args, **kwargs)\n return resp\n\n def get_day_trades(self, run_id):\n warnings.warn(\n 'get_day_trades will be abandoned, please use current_trades',\n DeprecationWarning)\n return self._do(self._get_day_trades, run_id)\n\n def get_positions(self, run_id):\n warnings.warn(\n 'current_positions will be abandoned, please use current_positions'\n , DeprecationWarning)\n return self._do(self._get_positions, run_id)\n\n def _get_day_trades(self, run_id):\n resp = self.client.get('{}/pt/load_day_trades/{}'.format(self.\n base_url, run_id), timeout=self.timeout)\n return resp.json()\n <mask token>\n\n @return_df()\n def trades(self, run_id):\n \"\"\"get all trades\"\"\"\n return self._do(self._get_base, 'trades', run_id)\n\n @return_df()\n def positions(self, run_id):\n \"\"\"get all positions (market_value)\"\"\"\n return self._do(self._get_base, 'positions', run_id)\n\n @return_df()\n def portfolio(self, run_id):\n \"\"\"get all portfolio\"\"\"\n return self._do(self._get_base, 'portfolio', run_id)\n\n @return_df('positions')\n def current_positions(self, run_id):\n \"\"\"get current positions\"\"\"\n return self._do(self._get_base, 'pt/load_current_positions', run_id)\n <mask token>\n\n def _get_base(self, api_path, run_id):\n resp = self.client.get('{}/{}/{}'.format(self.base_url, api_path,\n run_id), timeout=self.timeout)\n return resp.json()\n",
"step-2": "<mask token>\n\n\nclass RQOpenClient(object):\n\n def __init__(self, username, password, logger=None, log_level=logging.\n DEBUG, base_url='https://rqopen.ricequant.com', timeout=(5, 10),\n return_df=True):\n \"\"\"\n :param username: 登录账户\n :param password: 密码\n :param logger: 日志\n :param log_level: 日志级别\n :param base_url: 服务地址,默认web端 rqpro2.0需要单独配置\n :param timeout: 超时时间\n :param return_df: 返回数据是否为DataFrame False返回dict\n \"\"\"\n self.base_url = base_url\n if re.match('^[1]([3-9])[0-9]{9}$', username):\n username = '+86' + username\n self.username = username\n self.password = password\n self.client = requests.Session()\n self.logger = logger if logger else logging.getLogger('RQOpenClient')\n self.logger.setLevel(log_level)\n self.timeout = timeout\n self.return_df = return_df\n\n def login(self):\n self.logger.info('Try login. Username {}'.format(self.username))\n resp = self.client.post('{}/login'.format(self.base_url), {\n 'username': self.username, 'password': self.password}, timeout=\n self.timeout)\n ret = resp.json()\n self.logger.info('Login response {}'.format(ret))\n return ret\n\n def _do(self, func, *args, **kwargs):\n resp = func(*args, **kwargs)\n if resp['code'] == 401:\n login_resp = self.login()\n if login_resp['code'] == 200:\n self.logger.info('login success')\n else:\n return login_resp\n elif resp['code'] == 200:\n return resp\n resp = func(*args, **kwargs)\n return resp\n\n def get_day_trades(self, run_id):\n warnings.warn(\n 'get_day_trades will be abandoned, please use current_trades',\n DeprecationWarning)\n return self._do(self._get_day_trades, run_id)\n\n def get_positions(self, run_id):\n warnings.warn(\n 'current_positions will be abandoned, please use current_positions'\n , DeprecationWarning)\n return self._do(self._get_positions, run_id)\n\n def _get_day_trades(self, run_id):\n resp = self.client.get('{}/pt/load_day_trades/{}'.format(self.\n base_url, run_id), timeout=self.timeout)\n return resp.json()\n <mask token>\n\n @return_df()\n def trades(self, run_id):\n \"\"\"get all trades\"\"\"\n return self._do(self._get_base, 'trades', run_id)\n\n @return_df()\n def positions(self, run_id):\n \"\"\"get all positions (market_value)\"\"\"\n return self._do(self._get_base, 'positions', run_id)\n\n @return_df()\n def portfolio(self, run_id):\n \"\"\"get all portfolio\"\"\"\n return self._do(self._get_base, 'portfolio', run_id)\n\n @return_df('positions')\n def current_positions(self, run_id):\n \"\"\"get current positions\"\"\"\n return self._do(self._get_base, 'pt/load_current_positions', run_id)\n <mask token>\n\n def _get_base(self, api_path, run_id):\n resp = self.client.get('{}/{}/{}'.format(self.base_url, api_path,\n run_id), timeout=self.timeout)\n return resp.json()\n",
"step-3": "<mask token>\n\n\nclass RQOpenClient(object):\n\n def __init__(self, username, password, logger=None, log_level=logging.\n DEBUG, base_url='https://rqopen.ricequant.com', timeout=(5, 10),\n return_df=True):\n \"\"\"\n :param username: 登录账户\n :param password: 密码\n :param logger: 日志\n :param log_level: 日志级别\n :param base_url: 服务地址,默认web端 rqpro2.0需要单独配置\n :param timeout: 超时时间\n :param return_df: 返回数据是否为DataFrame False返回dict\n \"\"\"\n self.base_url = base_url\n if re.match('^[1]([3-9])[0-9]{9}$', username):\n username = '+86' + username\n self.username = username\n self.password = password\n self.client = requests.Session()\n self.logger = logger if logger else logging.getLogger('RQOpenClient')\n self.logger.setLevel(log_level)\n self.timeout = timeout\n self.return_df = return_df\n\n def login(self):\n self.logger.info('Try login. Username {}'.format(self.username))\n resp = self.client.post('{}/login'.format(self.base_url), {\n 'username': self.username, 'password': self.password}, timeout=\n self.timeout)\n ret = resp.json()\n self.logger.info('Login response {}'.format(ret))\n return ret\n\n def _do(self, func, *args, **kwargs):\n resp = func(*args, **kwargs)\n if resp['code'] == 401:\n login_resp = self.login()\n if login_resp['code'] == 200:\n self.logger.info('login success')\n else:\n return login_resp\n elif resp['code'] == 200:\n return resp\n resp = func(*args, **kwargs)\n return resp\n\n def get_day_trades(self, run_id):\n warnings.warn(\n 'get_day_trades will be abandoned, please use current_trades',\n DeprecationWarning)\n return self._do(self._get_day_trades, run_id)\n\n def get_positions(self, run_id):\n warnings.warn(\n 'current_positions will be abandoned, please use current_positions'\n , DeprecationWarning)\n return self._do(self._get_positions, run_id)\n\n def _get_day_trades(self, run_id):\n resp = self.client.get('{}/pt/load_day_trades/{}'.format(self.\n base_url, run_id), timeout=self.timeout)\n return resp.json()\n\n def _get_positions(self, run_id):\n resp = self.client.get('{}/pt/load_current_positions/{}'.format(\n self.base_url, run_id), timeout=self.timeout)\n return resp.json()\n\n @return_df()\n def trades(self, run_id):\n \"\"\"get all trades\"\"\"\n return self._do(self._get_base, 'trades', run_id)\n\n @return_df()\n def positions(self, run_id):\n \"\"\"get all positions (market_value)\"\"\"\n return self._do(self._get_base, 'positions', run_id)\n\n @return_df()\n def portfolio(self, run_id):\n \"\"\"get all portfolio\"\"\"\n return self._do(self._get_base, 'portfolio', run_id)\n\n @return_df('positions')\n def current_positions(self, run_id):\n \"\"\"get current positions\"\"\"\n return self._do(self._get_base, 'pt/load_current_positions', run_id)\n\n @return_df('trades')\n def current_trades(self, run_id):\n \"\"\"get current positions\"\"\"\n return self._do(self._get_base, 'pt/load_day_trades', run_id)\n\n def _get_base(self, api_path, run_id):\n resp = self.client.get('{}/{}/{}'.format(self.base_url, api_path,\n run_id), timeout=self.timeout)\n return resp.json()\n",
"step-4": "<mask token>\n\n\ndef return_df(field='data'):\n \"\"\"return DataFrame data\"\"\"\n\n def decorator(func):\n\n @wraps(func)\n def wrapper(self, *args, **kwargs):\n resp = func(self, *args, **kwargs)\n if resp.get('code') == 200 and self.return_df is True:\n df = pd.DataFrame(resp['resp'][field])\n if 'date' in df.columns:\n df['date'] = df['date'].apply(lambda x: datetime.\n datetime.strptime(x, '%Y-%m-%d %H:%M:%S'))\n df = df.set_index('date')\n return df\n return resp\n return wrapper\n return decorator\n\n\nclass RQOpenClient(object):\n\n def __init__(self, username, password, logger=None, log_level=logging.\n DEBUG, base_url='https://rqopen.ricequant.com', timeout=(5, 10),\n return_df=True):\n \"\"\"\n :param username: 登录账户\n :param password: 密码\n :param logger: 日志\n :param log_level: 日志级别\n :param base_url: 服务地址,默认web端 rqpro2.0需要单独配置\n :param timeout: 超时时间\n :param return_df: 返回数据是否为DataFrame False返回dict\n \"\"\"\n self.base_url = base_url\n if re.match('^[1]([3-9])[0-9]{9}$', username):\n username = '+86' + username\n self.username = username\n self.password = password\n self.client = requests.Session()\n self.logger = logger if logger else logging.getLogger('RQOpenClient')\n self.logger.setLevel(log_level)\n self.timeout = timeout\n self.return_df = return_df\n\n def login(self):\n self.logger.info('Try login. Username {}'.format(self.username))\n resp = self.client.post('{}/login'.format(self.base_url), {\n 'username': self.username, 'password': self.password}, timeout=\n self.timeout)\n ret = resp.json()\n self.logger.info('Login response {}'.format(ret))\n return ret\n\n def _do(self, func, *args, **kwargs):\n resp = func(*args, **kwargs)\n if resp['code'] == 401:\n login_resp = self.login()\n if login_resp['code'] == 200:\n self.logger.info('login success')\n else:\n return login_resp\n elif resp['code'] == 200:\n return resp\n resp = func(*args, **kwargs)\n return resp\n\n def get_day_trades(self, run_id):\n warnings.warn(\n 'get_day_trades will be abandoned, please use current_trades',\n DeprecationWarning)\n return self._do(self._get_day_trades, run_id)\n\n def get_positions(self, run_id):\n warnings.warn(\n 'current_positions will be abandoned, please use current_positions'\n , DeprecationWarning)\n return self._do(self._get_positions, run_id)\n\n def _get_day_trades(self, run_id):\n resp = self.client.get('{}/pt/load_day_trades/{}'.format(self.\n base_url, run_id), timeout=self.timeout)\n return resp.json()\n\n def _get_positions(self, run_id):\n resp = self.client.get('{}/pt/load_current_positions/{}'.format(\n self.base_url, run_id), timeout=self.timeout)\n return resp.json()\n\n @return_df()\n def trades(self, run_id):\n \"\"\"get all trades\"\"\"\n return self._do(self._get_base, 'trades', run_id)\n\n @return_df()\n def positions(self, run_id):\n \"\"\"get all positions (market_value)\"\"\"\n return self._do(self._get_base, 'positions', run_id)\n\n @return_df()\n def portfolio(self, run_id):\n \"\"\"get all portfolio\"\"\"\n return self._do(self._get_base, 'portfolio', run_id)\n\n @return_df('positions')\n def current_positions(self, run_id):\n \"\"\"get current positions\"\"\"\n return self._do(self._get_base, 'pt/load_current_positions', run_id)\n\n @return_df('trades')\n def current_trades(self, run_id):\n \"\"\"get current positions\"\"\"\n return self._do(self._get_base, 'pt/load_day_trades', run_id)\n\n def _get_base(self, api_path, run_id):\n resp = self.client.get('{}/{}/{}'.format(self.base_url, api_path,\n run_id), timeout=self.timeout)\n return 
resp.json()\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport datetime\nimport warnings\nfrom functools import wraps\nimport re\nimport logging\nimport pandas as pd\nimport requests\n\n\ndef return_df(field=\"data\"):\n \"\"\"return DataFrame data\"\"\"\n\n def decorator(func):\n @wraps(func)\n def wrapper(self, *args, **kwargs):\n resp = func(self, *args, **kwargs)\n if resp.get(\"code\") == 200 and self.return_df is True:\n df = pd.DataFrame(resp[\"resp\"][field])\n if \"date\" in df.columns:\n df['date'] = df['date'].apply(lambda x: datetime.datetime.strptime(x, \"%Y-%m-%d %H:%M:%S\"))\n df = df.set_index(\"date\")\n return df\n return resp\n\n return wrapper\n\n return decorator\n\n\nclass RQOpenClient(object):\n def __init__(self, username, password, logger=None, log_level=logging.DEBUG,\n base_url=\"https://rqopen.ricequant.com\", timeout=(5, 10), return_df=True):\n \"\"\"\n :param username: 登录账户\n :param password: 密码\n :param logger: 日志\n :param log_level: 日志级别\n :param base_url: 服务地址,默认web端 rqpro2.0需要单独配置\n :param timeout: 超时时间\n :param return_df: 返回数据是否为DataFrame False返回dict\n \"\"\"\n self.base_url = base_url\n # tel number need \"+86\"\n if re.match(r'^[1]([3-9])[0-9]{9}$', username):\n username = \"+86\" + username\n self.username = username\n self.password = password\n self.client = requests.Session()\n self.logger = logger if logger else logging.getLogger(\"RQOpenClient\")\n self.logger.setLevel(log_level)\n self.timeout = timeout\n self.return_df = return_df\n\n def login(self):\n self.logger.info(\"Try login. Username {}\".format(self.username))\n resp = self.client.post(\"{}/login\".format(self.base_url),\n {\"username\": self.username, \"password\": self.password}, timeout=self.timeout)\n ret = resp.json()\n self.logger.info(\"Login response {}\".format(ret))\n return ret\n\n def _do(self, func, *args, **kwargs):\n resp = func(*args, **kwargs)\n if resp[\"code\"] == 401:\n login_resp = self.login()\n if login_resp[\"code\"] == 200:\n self.logger.info(\"login success\")\n else:\n return login_resp\n elif resp[\"code\"] == 200:\n return resp\n resp = func(*args, **kwargs)\n return resp\n\n def get_day_trades(self, run_id):\n warnings.warn(\"get_day_trades will be abandoned, please use current_trades\", DeprecationWarning)\n return self._do(self._get_day_trades, run_id)\n\n def get_positions(self, run_id):\n warnings.warn(\"current_positions will be abandoned, please use current_positions\", DeprecationWarning)\n return self._do(self._get_positions, run_id)\n\n def _get_day_trades(self, run_id):\n resp = self.client.get(\"{}/pt/load_day_trades/{}\".format(self.base_url, run_id), timeout=self.timeout)\n return resp.json()\n\n def _get_positions(self, run_id):\n resp = self.client.get(\"{}/pt/load_current_positions/{}\".format(self.base_url, run_id), timeout=self.timeout)\n return resp.json()\n\n # base\n @return_df()\n def trades(self, run_id):\n \"\"\"get all trades\"\"\"\n return self._do(self._get_base, \"trades\", run_id)\n\n @return_df()\n def positions(self, run_id):\n \"\"\"get all positions (market_value)\"\"\"\n return self._do(self._get_base, \"positions\", run_id)\n\n @return_df()\n def portfolio(self, run_id):\n \"\"\"get all portfolio\"\"\"\n return self._do(self._get_base, \"portfolio\", run_id)\n\n @return_df(\"positions\")\n def current_positions(self, run_id):\n \"\"\"get current positions\"\"\"\n return self._do(self._get_base, \"pt/load_current_positions\", run_id)\n\n @return_df(\"trades\")\n def current_trades(self, run_id):\n \"\"\"get current 
positions\"\"\"\n return self._do(self._get_base, \"pt/load_day_trades\", run_id)\n\n def _get_base(self, api_path, run_id):\n resp = self.client.get(\"{}/{}/{}\".format(self.base_url, api_path, run_id), timeout=self.timeout)\n return resp.json()\n",
"step-ids": [
11,
12,
14,
15,
17
]
}
|
[
11,
12,
14,
15,
17
] |
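A hedged usage sketch for the client above. The credentials and run id are placeholders, and the calls assume network access to the rqopen service; RQOpenClient re-authenticates automatically when a request comes back with code 401, so no explicit login() call is required.

import logging

client = RQOpenClient('your_username', 'your_password', log_level=logging.INFO)
portfolio = client.portfolio(123456)           # a date-indexed DataFrame when return_df=True and the call succeeds
positions = client.current_positions(123456)   # latest positions for the same run
print(portfolio)
print(positions)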
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from .tacotron_v2_synthesizer import Tacotron2Synthesizer
|
flexible
|
{
"blob_id": "cf2fcd013c3e9992da36806ca93aacb4b5399396",
"index": 3172,
"step-1": "<mask token>\n",
"step-2": "from .tacotron_v2_synthesizer import Tacotron2Synthesizer\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
class SpiderMiddlewares2(object):
def process_request(self, request):
print(u'SpiderMiddlewares2 process_request {}'.format(request.url))
return request
def process_item(self, item):
print(u'SpiderMiddlewares2 process_item {}'.format(item.data))
return item
<|reserved_special_token_1|>
class SpiderMiddlewares1(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class SpiderMiddlewares2(object):
def process_request(self, request):
print(u'SpiderMiddlewares2 process_request {}'.format(request.url))
return request
def process_item(self, item):
print(u'SpiderMiddlewares2 process_item {}'.format(item.data))
return item
<|reserved_special_token_1|>
class SpiderMiddlewares1(object):
<|reserved_special_token_0|>
def process_item(self, item):
print(u'SpiderMiddlewares1 process_item {}'.format(item.data))
return item
class SpiderMiddlewares2(object):
def process_request(self, request):
print(u'SpiderMiddlewares2 process_request {}'.format(request.url))
return request
def process_item(self, item):
print(u'SpiderMiddlewares2 process_item {}'.format(item.data))
return item
<|reserved_special_token_1|>
class SpiderMiddlewares1(object):
def process_request(self, request):
print(u'SpiderMiddlewares1 process_request {}'.format(request.url))
return request
def process_item(self, item):
print(u'SpiderMiddlewares1 process_item {}'.format(item.data))
return item
class SpiderMiddlewares2(object):
def process_request(self, request):
print(u'SpiderMiddlewares2 process_request {}'.format(request.url))
return request
def process_item(self, item):
print(u'SpiderMiddlewares2 process_item {}'.format(item.data))
return item
<|reserved_special_token_1|>
# coding:utf-8
class SpiderMiddlewares1(object):
def process_request(self, request):
print(u"SpiderMiddlewares1 process_request {}".format(request.url))
return request
def process_item(self, item):
print(u"SpiderMiddlewares1 process_item {}".format(item.data))
return item
class SpiderMiddlewares2(object):
def process_request(self, request):
print(u"SpiderMiddlewares2 process_request {}".format(request.url))
return request
def process_item(self, item):
print(u"SpiderMiddlewares2 process_item {}".format(item.data))
return item
|
flexible
|
{
"blob_id": "8a2ab260f4758bcca7b1a68d1fb65b7eebab5533",
"index": 2518,
"step-1": "<mask token>\n\n\nclass SpiderMiddlewares2(object):\n\n def process_request(self, request):\n print(u'SpiderMiddlewares2 process_request {}'.format(request.url))\n return request\n\n def process_item(self, item):\n print(u'SpiderMiddlewares2 process_item {}'.format(item.data))\n return item\n",
"step-2": "class SpiderMiddlewares1(object):\n <mask token>\n <mask token>\n\n\nclass SpiderMiddlewares2(object):\n\n def process_request(self, request):\n print(u'SpiderMiddlewares2 process_request {}'.format(request.url))\n return request\n\n def process_item(self, item):\n print(u'SpiderMiddlewares2 process_item {}'.format(item.data))\n return item\n",
"step-3": "class SpiderMiddlewares1(object):\n <mask token>\n\n def process_item(self, item):\n print(u'SpiderMiddlewares1 process_item {}'.format(item.data))\n return item\n\n\nclass SpiderMiddlewares2(object):\n\n def process_request(self, request):\n print(u'SpiderMiddlewares2 process_request {}'.format(request.url))\n return request\n\n def process_item(self, item):\n print(u'SpiderMiddlewares2 process_item {}'.format(item.data))\n return item\n",
"step-4": "class SpiderMiddlewares1(object):\n\n def process_request(self, request):\n print(u'SpiderMiddlewares1 process_request {}'.format(request.url))\n return request\n\n def process_item(self, item):\n print(u'SpiderMiddlewares1 process_item {}'.format(item.data))\n return item\n\n\nclass SpiderMiddlewares2(object):\n\n def process_request(self, request):\n print(u'SpiderMiddlewares2 process_request {}'.format(request.url))\n return request\n\n def process_item(self, item):\n print(u'SpiderMiddlewares2 process_item {}'.format(item.data))\n return item\n",
"step-5": "# coding:utf-8\n\n\nclass SpiderMiddlewares1(object):\n def process_request(self, request):\n print(u\"SpiderMiddlewares1 process_request {}\".format(request.url))\n return request\n\n def process_item(self, item):\n print(u\"SpiderMiddlewares1 process_item {}\".format(item.data))\n return item\n\n\nclass SpiderMiddlewares2(object):\n def process_request(self, request):\n print(u\"SpiderMiddlewares2 process_request {}\".format(request.url))\n return request\n\n def process_item(self, item):\n print(u\"SpiderMiddlewares2 process_item {}\".format(item.data))\n return item\n\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
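The two middleware classes above only print and pass their argument through; an engine would typically chain them, feeding each request and item through every middleware in order. The Request and Item stand-ins below are illustrative scaffolding, not part of the record.

class Request(object):
    def __init__(self, url):
        self.url = url


class Item(object):
    def __init__(self, data):
        self.data = data


middlewares = [SpiderMiddlewares1(), SpiderMiddlewares2()]

request = Request('http://example.com')
for mw in middlewares:
    request = mw.process_request(request)  # each middleware may inspect or replace the request

item = Item({'title': 'example'})
for mw in middlewares:
    item = mw.process_item(item)  # and likewise each scraped item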
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def decimal_to_binary(num):
if num == 0:
return '0'
binary = ''
while num != 0:
binary = str(num % 2) + binary
num = num // 2
return binary
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def decimal_to_binary(num):
if num == 0:
return '0'
binary = ''
while num != 0:
binary = str(num % 2) + binary
num = num // 2
return binary
def modulo(numerator, exp, denominator):
binary = decimal_to_binary(exp)
prev_result = numerator
result = 1
for i in range(len(binary) - 2, -1, -1):
reg = binary[i]
prev_result = prev_result * prev_result % denominator
if reg == '1':
result *= prev_result
    if binary[-1] == '1':  # the least-significant bit decides the final multiply by the base
result *= numerator
return result % denominator
<|reserved_special_token_1|>
def decimal_to_binary(num):
if num == 0: return '0'
binary = ''
while num != 0:
binary = str(num % 2) + binary
num = num // 2
return binary
def modulo(numerator, exp, denominator):
binary = decimal_to_binary(exp)
prev_result = numerator
result = 1
for i in range(len(binary) - 2, -1, -1):
reg = binary[i]
prev_result = (prev_result * prev_result) % denominator
if (reg == '1'):
result *= prev_result
    if binary[-1] == '1':  # the least-significant bit decides the final multiply by the base
result *= numerator
return result % denominator
# print(modulo(5, 149, 17))
# print(decimal_to_binary(0))
# print(decimal_to_binary(1))
# print(decimal_to_binary(2))
# print(decimal_to_binary(8))
# print(decimal_to_binary(10))
# print(decimal_to_binary(11))
|
flexible
|
{
"blob_id": "4e202cf7d7da865498ef5f65efdf5851c62082ff",
"index": 6764,
"step-1": "<mask token>\n",
"step-2": "def decimal_to_binary(num):\n if num == 0:\n return '0'\n binary = ''\n while num != 0:\n binary = str(num % 2) + binary\n num = num // 2\n return binary\n\n\n<mask token>\n",
"step-3": "def decimal_to_binary(num):\n if num == 0:\n return '0'\n binary = ''\n while num != 0:\n binary = str(num % 2) + binary\n num = num // 2\n return binary\n\n\ndef modulo(numerator, exp, denominator):\n binary = decimal_to_binary(exp)\n prev_result = numerator\n result = 1\n for i in range(len(binary) - 2, -1, -1):\n reg = binary[i]\n prev_result = prev_result * prev_result % denominator\n if reg == '1':\n result *= prev_result\n if binary[0] == '1':\n result *= numerator\n return result % denominator\n",
"step-4": "def decimal_to_binary(num):\n if num == 0: return '0'\n\n binary = ''\n while num != 0:\n binary = str(num % 2) + binary\n num = num // 2\n return binary\n\ndef modulo(numerator, exp, denominator):\n binary = decimal_to_binary(exp)\n\n prev_result = numerator\n result = 1\n for i in range(len(binary) - 2, -1, -1):\n reg = binary[i]\n prev_result = (prev_result * prev_result) % denominator\n if (reg == '1'):\n result *= prev_result\n \n if binary[0] == '1':\n result *= numerator\n \n return result % denominator\n\n# print(modulo(5, 149, 17))\n\n# print(decimal_to_binary(0))\n# print(decimal_to_binary(1))\n# print(decimal_to_binary(2))\n# print(decimal_to_binary(8))\n# print(decimal_to_binary(10))\n# print(decimal_to_binary(11))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
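modulo() above is binary (square-and-multiply) exponentiation: it scans the exponent's bits, squaring the running power once per bit and folding it into the result when the bit is set. Python's built-in three-argument pow computes the same thing, which gives a cheap spot check, assuming the two functions above are in scope (the commented call in the record, modulo(5, 149, 17), evaluates to 14).

assert modulo(5, 149, 17) == pow(5, 149, 17) == 14
assert modulo(3, 45, 7) == pow(3, 45, 7)
print('modulo() agrees with the built-in pow() on these spot checks')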
<|reserved_special_token_0|>
class Dataset(object):
def __init__(self):
self.items_descriptions = {'image':
'A color image of varying height and width.', 'shape':
'Shape of the image', 'object/bbox':
'A list of bounding boxes, one per each object.',
'object/label': 'A list of labels, one per each object.'}
self.features = {'image/encoded': tf.FixedLenFeature((), tf.string,
default_value=''), 'image/format': tf.FixedLenFeature((), tf.
string, default_value='jpeg'), 'image/object/bbox/xmin': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymin': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/xmax': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymax': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/label': tf.
VarLenFeature(dtype=tf.int64), 'image/object/bbox/difficult':
tf.VarLenFeature(dtype=tf.int64)}
self.items = {'image': slim.tfexample_decoder.Image('image/encoded',
'image/format'), 'gt_bboxes': slim.tfexample_decoder.
BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],
'image/object/bbox/'), 'gt_labels': slim.tfexample_decoder.
Tensor('image/object/bbox/label'), 'difficult_objects': slim.
tfexample_decoder.Tensor('image/object/bbox/difficult')}
def read_dataset_from_tfrecords(self, dataset_name, train_or_test,
dataset_path):
with tf.name_scope(None, 'read_dataset_from_tfrecords') as scope:
if (dataset_name == 'pascalvoc_2007' or dataset_name ==
'pascalvoc_2012'):
dataset = self.load_dataset(dataset_name, train_or_test,
dataset_path)
return dataset
def load_dataset(self, dataset_name, train_or_test, dataset_path):
dataset_file_name = dataset_name[6:] + '_%s_*.tfrecord'
if dataset_name == 'pascalvoc_2007':
train_test_sizes = {'train': FLAGS.pascalvoc_2007_train_size,
'test': FLAGS.pascalvoc_2007_test_size}
elif dataset_name == 'pascalvoc_2012':
train_test_sizes = {'train': FLAGS.pascalvoc_2012_train_size}
dataset_file_name = os.path.join(dataset_path, dataset_file_name %
train_or_test)
reader = tf.TFRecordReader
decoder = slim.tfexample_decoder.TFExampleDecoder(self.features,
self.items)
return slim.dataset.Dataset(data_sources=dataset_file_name, reader=
reader, decoder=decoder, num_samples=train_test_sizes[
train_or_test], items_to_descriptions=self.items_descriptions,
num_classes=FLAGS.num_classes - 1, labels_to_names=None)
def get_groundtruth_from_dataset(self, dataset, train_or_test):
with tf.name_scope(None, 'get_groundtruth_from_dataset') as scope:
if train_or_test == 'test':
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset, num_readers=FLAGS.test_num_readers,
common_queue_capacity=FLAGS.test_common_queue_capacity,
common_queue_min=FLAGS.test_batch_size, shuffle=FLAGS.
test_shuffle)
elif train_or_test == 'train':
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset, num_readers=FLAGS.train_num_readers,
common_queue_capacity=FLAGS.train_common_queue_capacity,
common_queue_min=10 * FLAGS.train_batch_size, shuffle=
FLAGS.train_shuffle)
[image, gt_bboxes, gt_labels] = provider.get(['image',
'gt_bboxes', 'gt_labels'])
gt_difficult_objects = tf.zeros(tf.shape(gt_labels), dtype=tf.int64
)
if FLAGS.test_discard_difficult_objects:
[gt_difficult_objects] = provider.get(['difficult_objects'])
return [image, gt_bboxes, gt_labels, gt_difficult_objects]
def _process_image_PascalVOC(self, directory, name):
filename = directory + DIRECTORY_IMAGES + name + '.jpg'
image_data = tf.gfile.FastGFile(filename, 'r').read()
filename = os.path.join(directory, DIRECTORY_ANNOTATIONS, name + '.xml'
)
tree = ET.parse(filename)
root = tree.getroot()
size = root.find('size')
shape = [int(size.find('height').text), int(size.find('width').text
), int(size.find('depth').text)]
bboxes = []
labels = []
labels_text = []
difficult = []
truncated = []
for obj in root.findall('object'):
label = obj.find('name').text
labels.append(int(VOC_LABELS[label][0]))
labels_text.append(label.encode('ascii'))
if obj.find('difficult'):
difficult.append(int(obj.find('difficult').text))
else:
difficult.append(0)
if obj.find('truncated'):
truncated.append(int(obj.find('truncated').text))
else:
truncated.append(0)
bbox = obj.find('bndbox')
bboxes.append((float(bbox.find('ymin').text) / shape[0], float(
bbox.find('xmin').text) / shape[1], float(bbox.find('ymax')
.text) / shape[0], float(bbox.find('xmax').text) / shape[1]))
return (image_data, shape, bboxes, labels, labels_text, difficult,
truncated)
def _convert_to_example_PascalVOC(self, image_data, labels, labels_text,
bboxes, shape, difficult, truncated):
xmin = []
ymin = []
xmax = []
ymax = []
for b in bboxes:
assert len(b) == 4
[l.append(point) for l, point in zip([ymin, xmin, ymax, xmax], b)]
image_format = b'JPEG'
example = tf.train.Example(features=tf.train.Features(feature={
'image/height': self.int64_feature(shape[0]), 'image/width':
self.int64_feature(shape[1]), 'image/channels': self.
int64_feature(shape[2]), 'image/shape': self.int64_feature(
shape), 'image/object/bbox/xmin': self.float_feature(xmin),
'image/object/bbox/xmax': self.float_feature(xmax),
'image/object/bbox/ymin': self.float_feature(ymin),
'image/object/bbox/ymax': self.float_feature(ymax),
'image/object/bbox/label': self.int64_feature(labels),
'image/object/bbox/label_text': self.bytes_feature(labels_text),
'image/object/bbox/difficult': self.int64_feature(difficult),
'image/object/bbox/truncated': self.int64_feature(truncated),
'image/format': self.bytes_feature(image_format),
'image/encoded': self.bytes_feature(image_data)}))
return example
<|reserved_special_token_0|>
def _get_output_filename_PascalVOC(output_dir, name, idx):
return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)
<|reserved_special_token_0|>
def int64_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
def float_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(float_list=tf.train.FloatList(value=value))
def bytes_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Dataset(object):
def __init__(self):
self.items_descriptions = {'image':
'A color image of varying height and width.', 'shape':
'Shape of the image', 'object/bbox':
'A list of bounding boxes, one per each object.',
'object/label': 'A list of labels, one per each object.'}
self.features = {'image/encoded': tf.FixedLenFeature((), tf.string,
default_value=''), 'image/format': tf.FixedLenFeature((), tf.
string, default_value='jpeg'), 'image/object/bbox/xmin': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymin': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/xmax': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymax': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/label': tf.
VarLenFeature(dtype=tf.int64), 'image/object/bbox/difficult':
tf.VarLenFeature(dtype=tf.int64)}
self.items = {'image': slim.tfexample_decoder.Image('image/encoded',
'image/format'), 'gt_bboxes': slim.tfexample_decoder.
BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],
'image/object/bbox/'), 'gt_labels': slim.tfexample_decoder.
Tensor('image/object/bbox/label'), 'difficult_objects': slim.
tfexample_decoder.Tensor('image/object/bbox/difficult')}
def read_dataset_from_tfrecords(self, dataset_name, train_or_test,
dataset_path):
with tf.name_scope(None, 'read_dataset_from_tfrecords') as scope:
if (dataset_name == 'pascalvoc_2007' or dataset_name ==
'pascalvoc_2012'):
dataset = self.load_dataset(dataset_name, train_or_test,
dataset_path)
return dataset
def load_dataset(self, dataset_name, train_or_test, dataset_path):
dataset_file_name = dataset_name[6:] + '_%s_*.tfrecord'
if dataset_name == 'pascalvoc_2007':
train_test_sizes = {'train': FLAGS.pascalvoc_2007_train_size,
'test': FLAGS.pascalvoc_2007_test_size}
elif dataset_name == 'pascalvoc_2012':
train_test_sizes = {'train': FLAGS.pascalvoc_2012_train_size}
dataset_file_name = os.path.join(dataset_path, dataset_file_name %
train_or_test)
reader = tf.TFRecordReader
decoder = slim.tfexample_decoder.TFExampleDecoder(self.features,
self.items)
return slim.dataset.Dataset(data_sources=dataset_file_name, reader=
reader, decoder=decoder, num_samples=train_test_sizes[
train_or_test], items_to_descriptions=self.items_descriptions,
num_classes=FLAGS.num_classes - 1, labels_to_names=None)
def get_groundtruth_from_dataset(self, dataset, train_or_test):
with tf.name_scope(None, 'get_groundtruth_from_dataset') as scope:
if train_or_test == 'test':
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset, num_readers=FLAGS.test_num_readers,
common_queue_capacity=FLAGS.test_common_queue_capacity,
common_queue_min=FLAGS.test_batch_size, shuffle=FLAGS.
test_shuffle)
elif train_or_test == 'train':
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset, num_readers=FLAGS.train_num_readers,
common_queue_capacity=FLAGS.train_common_queue_capacity,
common_queue_min=10 * FLAGS.train_batch_size, shuffle=
FLAGS.train_shuffle)
[image, gt_bboxes, gt_labels] = provider.get(['image',
'gt_bboxes', 'gt_labels'])
gt_difficult_objects = tf.zeros(tf.shape(gt_labels), dtype=tf.int64
)
if FLAGS.test_discard_difficult_objects:
[gt_difficult_objects] = provider.get(['difficult_objects'])
return [image, gt_bboxes, gt_labels, gt_difficult_objects]
def _process_image_PascalVOC(self, directory, name):
filename = directory + DIRECTORY_IMAGES + name + '.jpg'
image_data = tf.gfile.FastGFile(filename, 'r').read()
filename = os.path.join(directory, DIRECTORY_ANNOTATIONS, name + '.xml'
)
tree = ET.parse(filename)
root = tree.getroot()
size = root.find('size')
shape = [int(size.find('height').text), int(size.find('width').text
), int(size.find('depth').text)]
bboxes = []
labels = []
labels_text = []
difficult = []
truncated = []
for obj in root.findall('object'):
label = obj.find('name').text
labels.append(int(VOC_LABELS[label][0]))
labels_text.append(label.encode('ascii'))
if obj.find('difficult'):
difficult.append(int(obj.find('difficult').text))
else:
difficult.append(0)
if obj.find('truncated'):
truncated.append(int(obj.find('truncated').text))
else:
truncated.append(0)
bbox = obj.find('bndbox')
bboxes.append((float(bbox.find('ymin').text) / shape[0], float(
bbox.find('xmin').text) / shape[1], float(bbox.find('ymax')
.text) / shape[0], float(bbox.find('xmax').text) / shape[1]))
return (image_data, shape, bboxes, labels, labels_text, difficult,
truncated)
def _convert_to_example_PascalVOC(self, image_data, labels, labels_text,
bboxes, shape, difficult, truncated):
xmin = []
ymin = []
xmax = []
ymax = []
for b in bboxes:
assert len(b) == 4
[l.append(point) for l, point in zip([ymin, xmin, ymax, xmax], b)]
image_format = b'JPEG'
example = tf.train.Example(features=tf.train.Features(feature={
'image/height': self.int64_feature(shape[0]), 'image/width':
self.int64_feature(shape[1]), 'image/channels': self.
int64_feature(shape[2]), 'image/shape': self.int64_feature(
shape), 'image/object/bbox/xmin': self.float_feature(xmin),
'image/object/bbox/xmax': self.float_feature(xmax),
'image/object/bbox/ymin': self.float_feature(ymin),
'image/object/bbox/ymax': self.float_feature(ymax),
'image/object/bbox/label': self.int64_feature(labels),
'image/object/bbox/label_text': self.bytes_feature(labels_text),
'image/object/bbox/difficult': self.int64_feature(difficult),
'image/object/bbox/truncated': self.int64_feature(truncated),
'image/format': self.bytes_feature(image_format),
'image/encoded': self.bytes_feature(image_data)}))
return example
def _add_to_tfrecord_PascalVOC(self, dataset_dir, name, tfrecord_writer):
(image_data, shape, bboxes, labels, labels_text, difficult, truncated
) = self._process_image_PascalVOC(dataset_dir, name)
example = self._convert_to_example_PascalVOC(image_data, labels,
labels_text, bboxes, shape, difficult, truncated)
tfrecord_writer.write(example.SerializeToString())
def _get_output_filename_PascalVOC(output_dir, name, idx):
return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)
<|reserved_special_token_0|>
def int64_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
def float_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(float_list=tf.train.FloatList(value=value))
def bytes_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Dataset(object):
def __init__(self):
self.items_descriptions = {'image':
'A color image of varying height and width.', 'shape':
'Shape of the image', 'object/bbox':
'A list of bounding boxes, one per each object.',
'object/label': 'A list of labels, one per each object.'}
self.features = {'image/encoded': tf.FixedLenFeature((), tf.string,
default_value=''), 'image/format': tf.FixedLenFeature((), tf.
string, default_value='jpeg'), 'image/object/bbox/xmin': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymin': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/xmax': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymax': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/label': tf.
VarLenFeature(dtype=tf.int64), 'image/object/bbox/difficult':
tf.VarLenFeature(dtype=tf.int64)}
self.items = {'image': slim.tfexample_decoder.Image('image/encoded',
'image/format'), 'gt_bboxes': slim.tfexample_decoder.
BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],
'image/object/bbox/'), 'gt_labels': slim.tfexample_decoder.
Tensor('image/object/bbox/label'), 'difficult_objects': slim.
tfexample_decoder.Tensor('image/object/bbox/difficult')}
def read_dataset_from_tfrecords(self, dataset_name, train_or_test,
dataset_path):
with tf.name_scope(None, 'read_dataset_from_tfrecords') as scope:
if (dataset_name == 'pascalvoc_2007' or dataset_name ==
'pascalvoc_2012'):
dataset = self.load_dataset(dataset_name, train_or_test,
dataset_path)
return dataset
def load_dataset(self, dataset_name, train_or_test, dataset_path):
dataset_file_name = dataset_name[6:] + '_%s_*.tfrecord'
if dataset_name == 'pascalvoc_2007':
train_test_sizes = {'train': FLAGS.pascalvoc_2007_train_size,
'test': FLAGS.pascalvoc_2007_test_size}
elif dataset_name == 'pascalvoc_2012':
train_test_sizes = {'train': FLAGS.pascalvoc_2012_train_size}
dataset_file_name = os.path.join(dataset_path, dataset_file_name %
train_or_test)
reader = tf.TFRecordReader
decoder = slim.tfexample_decoder.TFExampleDecoder(self.features,
self.items)
return slim.dataset.Dataset(data_sources=dataset_file_name, reader=
reader, decoder=decoder, num_samples=train_test_sizes[
train_or_test], items_to_descriptions=self.items_descriptions,
num_classes=FLAGS.num_classes - 1, labels_to_names=None)
def get_groundtruth_from_dataset(self, dataset, train_or_test):
with tf.name_scope(None, 'get_groundtruth_from_dataset') as scope:
if train_or_test == 'test':
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset, num_readers=FLAGS.test_num_readers,
common_queue_capacity=FLAGS.test_common_queue_capacity,
common_queue_min=FLAGS.test_batch_size, shuffle=FLAGS.
test_shuffle)
elif train_or_test == 'train':
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset, num_readers=FLAGS.train_num_readers,
common_queue_capacity=FLAGS.train_common_queue_capacity,
common_queue_min=10 * FLAGS.train_batch_size, shuffle=
FLAGS.train_shuffle)
[image, gt_bboxes, gt_labels] = provider.get(['image',
'gt_bboxes', 'gt_labels'])
gt_difficult_objects = tf.zeros(tf.shape(gt_labels), dtype=tf.int64
)
if FLAGS.test_discard_difficult_objects:
[gt_difficult_objects] = provider.get(['difficult_objects'])
return [image, gt_bboxes, gt_labels, gt_difficult_objects]
def _process_image_PascalVOC(self, directory, name):
filename = directory + DIRECTORY_IMAGES + name + '.jpg'
image_data = tf.gfile.FastGFile(filename, 'r').read()
filename = os.path.join(directory, DIRECTORY_ANNOTATIONS, name + '.xml'
)
tree = ET.parse(filename)
root = tree.getroot()
size = root.find('size')
shape = [int(size.find('height').text), int(size.find('width').text
), int(size.find('depth').text)]
bboxes = []
labels = []
labels_text = []
difficult = []
truncated = []
for obj in root.findall('object'):
label = obj.find('name').text
labels.append(int(VOC_LABELS[label][0]))
labels_text.append(label.encode('ascii'))
if obj.find('difficult'):
difficult.append(int(obj.find('difficult').text))
else:
difficult.append(0)
if obj.find('truncated'):
truncated.append(int(obj.find('truncated').text))
else:
truncated.append(0)
bbox = obj.find('bndbox')
bboxes.append((float(bbox.find('ymin').text) / shape[0], float(
bbox.find('xmin').text) / shape[1], float(bbox.find('ymax')
.text) / shape[0], float(bbox.find('xmax').text) / shape[1]))
return (image_data, shape, bboxes, labels, labels_text, difficult,
truncated)
def _convert_to_example_PascalVOC(self, image_data, labels, labels_text,
bboxes, shape, difficult, truncated):
xmin = []
ymin = []
xmax = []
ymax = []
for b in bboxes:
assert len(b) == 4
[l.append(point) for l, point in zip([ymin, xmin, ymax, xmax], b)]
image_format = b'JPEG'
example = tf.train.Example(features=tf.train.Features(feature={
'image/height': self.int64_feature(shape[0]), 'image/width':
self.int64_feature(shape[1]), 'image/channels': self.
int64_feature(shape[2]), 'image/shape': self.int64_feature(
shape), 'image/object/bbox/xmin': self.float_feature(xmin),
'image/object/bbox/xmax': self.float_feature(xmax),
'image/object/bbox/ymin': self.float_feature(ymin),
'image/object/bbox/ymax': self.float_feature(ymax),
'image/object/bbox/label': self.int64_feature(labels),
'image/object/bbox/label_text': self.bytes_feature(labels_text),
'image/object/bbox/difficult': self.int64_feature(difficult),
'image/object/bbox/truncated': self.int64_feature(truncated),
'image/format': self.bytes_feature(image_format),
'image/encoded': self.bytes_feature(image_data)}))
return example
def _add_to_tfrecord_PascalVOC(self, dataset_dir, name, tfrecord_writer):
(image_data, shape, bboxes, labels, labels_text, difficult, truncated
) = self._process_image_PascalVOC(dataset_dir, name)
example = self._convert_to_example_PascalVOC(image_data, labels,
labels_text, bboxes, shape, difficult, truncated)
tfrecord_writer.write(example.SerializeToString())
    def _get_output_filename_PascalVOC(self, output_dir, name, idx):
        return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)
def run_PascalVOC(self, dataset_dir, output_dir, name='voc_train',
shuffling=False):
if not tf.gfile.Exists(dataset_dir):
tf.gfile.MakeDirs(dataset_dir)
path = os.path.join(dataset_dir, DIRECTORY_ANNOTATIONS)
filenames = sorted(os.listdir(path))
if shuffling:
random.seed(RANDOM_SEED)
random.shuffle(filenames)
i = 0
fidx = 0
while i < len(filenames):
            tf_filename = self._get_output_filename_PascalVOC(output_dir, name, fidx)
with tf.python_io.TFRecordWriter(tf_filename) as tfrecord_writer:
j = 0
while i < len(filenames) and j < SAMPLES_PER_FILES:
sys.stdout.write('\r>> Converting image %d/%d' % (i + 1,
len(filenames)))
sys.stdout.flush()
filename = filenames[i]
img_name = filename[:-4]
self._add_to_tfrecord_PascalVOC(dataset_dir, img_name,
tfrecord_writer)
i += 1
j += 1
fidx += 1
print('\n ImageDB to TF conversion finished. ')
def int64_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
def float_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(float_list=tf.train.FloatList(value=value))
def bytes_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
DIRECTORY_ANNOTATIONS = 'Annotations/'
DIRECTORY_IMAGES = 'JPEGImages/'
RANDOM_SEED = 4242
SAMPLES_PER_FILES = 200
slim = tf.contrib.slim
class Dataset(object):
def __init__(self):
self.items_descriptions = {'image':
'A color image of varying height and width.', 'shape':
'Shape of the image', 'object/bbox':
'A list of bounding boxes, one per each object.',
'object/label': 'A list of labels, one per each object.'}
self.features = {'image/encoded': tf.FixedLenFeature((), tf.string,
default_value=''), 'image/format': tf.FixedLenFeature((), tf.
string, default_value='jpeg'), 'image/object/bbox/xmin': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymin': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/xmax': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymax': tf.
VarLenFeature(dtype=tf.float32), 'image/object/bbox/label': tf.
VarLenFeature(dtype=tf.int64), 'image/object/bbox/difficult':
tf.VarLenFeature(dtype=tf.int64)}
self.items = {'image': slim.tfexample_decoder.Image('image/encoded',
'image/format'), 'gt_bboxes': slim.tfexample_decoder.
BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],
'image/object/bbox/'), 'gt_labels': slim.tfexample_decoder.
Tensor('image/object/bbox/label'), 'difficult_objects': slim.
tfexample_decoder.Tensor('image/object/bbox/difficult')}
def read_dataset_from_tfrecords(self, dataset_name, train_or_test,
dataset_path):
with tf.name_scope(None, 'read_dataset_from_tfrecords') as scope:
if (dataset_name == 'pascalvoc_2007' or dataset_name ==
'pascalvoc_2012'):
dataset = self.load_dataset(dataset_name, train_or_test,
dataset_path)
return dataset
def load_dataset(self, dataset_name, train_or_test, dataset_path):
dataset_file_name = dataset_name[6:] + '_%s_*.tfrecord'
if dataset_name == 'pascalvoc_2007':
train_test_sizes = {'train': FLAGS.pascalvoc_2007_train_size,
'test': FLAGS.pascalvoc_2007_test_size}
elif dataset_name == 'pascalvoc_2012':
train_test_sizes = {'train': FLAGS.pascalvoc_2012_train_size}
dataset_file_name = os.path.join(dataset_path, dataset_file_name %
train_or_test)
reader = tf.TFRecordReader
decoder = slim.tfexample_decoder.TFExampleDecoder(self.features,
self.items)
return slim.dataset.Dataset(data_sources=dataset_file_name, reader=
reader, decoder=decoder, num_samples=train_test_sizes[
train_or_test], items_to_descriptions=self.items_descriptions,
num_classes=FLAGS.num_classes - 1, labels_to_names=None)
def get_groundtruth_from_dataset(self, dataset, train_or_test):
with tf.name_scope(None, 'get_groundtruth_from_dataset') as scope:
if train_or_test == 'test':
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset, num_readers=FLAGS.test_num_readers,
common_queue_capacity=FLAGS.test_common_queue_capacity,
common_queue_min=FLAGS.test_batch_size, shuffle=FLAGS.
test_shuffle)
elif train_or_test == 'train':
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset, num_readers=FLAGS.train_num_readers,
common_queue_capacity=FLAGS.train_common_queue_capacity,
common_queue_min=10 * FLAGS.train_batch_size, shuffle=
FLAGS.train_shuffle)
[image, gt_bboxes, gt_labels] = provider.get(['image',
'gt_bboxes', 'gt_labels'])
gt_difficult_objects = tf.zeros(tf.shape(gt_labels), dtype=tf.int64
)
if FLAGS.test_discard_difficult_objects:
[gt_difficult_objects] = provider.get(['difficult_objects'])
return [image, gt_bboxes, gt_labels, gt_difficult_objects]
def _process_image_PascalVOC(self, directory, name):
filename = directory + DIRECTORY_IMAGES + name + '.jpg'
image_data = tf.gfile.FastGFile(filename, 'r').read()
filename = os.path.join(directory, DIRECTORY_ANNOTATIONS, name + '.xml'
)
tree = ET.parse(filename)
root = tree.getroot()
size = root.find('size')
shape = [int(size.find('height').text), int(size.find('width').text
), int(size.find('depth').text)]
bboxes = []
labels = []
labels_text = []
difficult = []
truncated = []
for obj in root.findall('object'):
label = obj.find('name').text
labels.append(int(VOC_LABELS[label][0]))
labels_text.append(label.encode('ascii'))
if obj.find('difficult'):
difficult.append(int(obj.find('difficult').text))
else:
difficult.append(0)
if obj.find('truncated'):
truncated.append(int(obj.find('truncated').text))
else:
truncated.append(0)
bbox = obj.find('bndbox')
bboxes.append((float(bbox.find('ymin').text) / shape[0], float(
bbox.find('xmin').text) / shape[1], float(bbox.find('ymax')
.text) / shape[0], float(bbox.find('xmax').text) / shape[1]))
return (image_data, shape, bboxes, labels, labels_text, difficult,
truncated)
def _convert_to_example_PascalVOC(self, image_data, labels, labels_text,
bboxes, shape, difficult, truncated):
xmin = []
ymin = []
xmax = []
ymax = []
for b in bboxes:
assert len(b) == 4
[l.append(point) for l, point in zip([ymin, xmin, ymax, xmax], b)]
image_format = b'JPEG'
example = tf.train.Example(features=tf.train.Features(feature={
'image/height': self.int64_feature(shape[0]), 'image/width':
self.int64_feature(shape[1]), 'image/channels': self.
int64_feature(shape[2]), 'image/shape': self.int64_feature(
shape), 'image/object/bbox/xmin': self.float_feature(xmin),
'image/object/bbox/xmax': self.float_feature(xmax),
'image/object/bbox/ymin': self.float_feature(ymin),
'image/object/bbox/ymax': self.float_feature(ymax),
'image/object/bbox/label': self.int64_feature(labels),
'image/object/bbox/label_text': self.bytes_feature(labels_text),
'image/object/bbox/difficult': self.int64_feature(difficult),
'image/object/bbox/truncated': self.int64_feature(truncated),
'image/format': self.bytes_feature(image_format),
'image/encoded': self.bytes_feature(image_data)}))
return example
def _add_to_tfrecord_PascalVOC(self, dataset_dir, name, tfrecord_writer):
(image_data, shape, bboxes, labels, labels_text, difficult, truncated
) = self._process_image_PascalVOC(dataset_dir, name)
example = self._convert_to_example_PascalVOC(image_data, labels,
labels_text, bboxes, shape, difficult, truncated)
tfrecord_writer.write(example.SerializeToString())
def _get_output_filename_PascalVOC(output_dir, name, idx):
return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)
def run_PascalVOC(self, dataset_dir, output_dir, name='voc_train',
shuffling=False):
if not tf.gfile.Exists(dataset_dir):
tf.gfile.MakeDirs(dataset_dir)
path = os.path.join(dataset_dir, DIRECTORY_ANNOTATIONS)
filenames = sorted(os.listdir(path))
if shuffling:
random.seed(RANDOM_SEED)
random.shuffle(filenames)
i = 0
fidx = 0
while i < len(filenames):
tf_filename = self._get_output_filename(output_dir, name, fidx)
with tf.python_io.TFRecordWriter(tf_filename) as tfrecord_writer:
j = 0
while i < len(filenames) and j < SAMPLES_PER_FILES:
sys.stdout.write('\r>> Converting image %d/%d' % (i + 1,
len(filenames)))
sys.stdout.flush()
filename = filenames[i]
img_name = filename[:-4]
self._add_to_tfrecord_PascalVOC(dataset_dir, img_name,
tfrecord_writer)
i += 1
j += 1
fidx += 1
print('\n ImageDB to TF conversion finished. ')
def int64_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
def float_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(float_list=tf.train.FloatList(value=value))
def bytes_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))
<|reserved_special_token_1|>
# This module is used to load pascalvoc datasets (2007 or 2012)
import os
import tensorflow as tf
from configs.config_common import *
from configs.config_train import *
from configs.config_test import *
import sys
import random
import numpy as np
import xml.etree.ElementTree as ET
# Original dataset organisation.
DIRECTORY_ANNOTATIONS = 'Annotations/'
DIRECTORY_IMAGES = 'JPEGImages/'
# TFRecords conversion parameters.
RANDOM_SEED = 4242
SAMPLES_PER_FILES = 200
slim = tf.contrib.slim
class Dataset(object):
def __init__(self):
# Descriptions of the image items
self.items_descriptions = {
'image': 'A color image of varying height and width.',
'shape': 'Shape of the image',
'object/bbox': 'A list of bounding boxes, one per each object.',
'object/label': 'A list of labels, one per each object.',
}
# Features of Pascal VOC TFRecords.
self.features = {
'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),
'image/format': tf.FixedLenFeature((), tf.string, default_value='jpeg'),
'image/object/bbox/xmin': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/ymin': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/xmax': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/ymax': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/label': tf.VarLenFeature(dtype=tf.int64),
'image/object/bbox/difficult': tf.VarLenFeature(dtype=tf.int64),
}
# Items in Pascal VOC TFRecords.
self.items = {
'image': slim.tfexample_decoder.Image('image/encoded', 'image/format'),
'gt_bboxes': slim.tfexample_decoder.BoundingBox(['ymin','xmin','ymax','xmax'], 'image/object/bbox/'),
'gt_labels': slim.tfexample_decoder.Tensor('image/object/bbox/label'),
'difficult_objects': slim.tfexample_decoder.Tensor('image/object/bbox/difficult'),
}
# This function reads dataset from tfrecords
# Inputs:
    #   dataset_name: pascalvoc_2007
# train_or_test: test
# dataset_path: './tfrecords_test/'
# Outputs:
# loaded dataset
def read_dataset_from_tfrecords(self, dataset_name, train_or_test, dataset_path):
with tf.name_scope(None, "read_dataset_from_tfrecords") as scope:
if dataset_name == 'pascalvoc_2007' or dataset_name == 'pascalvoc_2012':
dataset = self.load_dataset(dataset_name, train_or_test, dataset_path)
return dataset
    # This function is used to load the pascalvoc_2007 or pascalvoc_2012 datasets
# Inputs:
# dataset_name: pascalvoc_2007
# train_or_test: test
# dataset_path: './tfrecords_test/'
# Output:
# loaded dataset
def load_dataset(self, dataset_name, train_or_test, dataset_path):
dataset_file_name = dataset_name[6:] + '_%s_*.tfrecord'
if dataset_name == 'pascalvoc_2007':
train_test_sizes = {
'train': FLAGS.pascalvoc_2007_train_size,
'test': FLAGS.pascalvoc_2007_test_size,
}
elif dataset_name == 'pascalvoc_2012':
train_test_sizes = {
'train': FLAGS.pascalvoc_2012_train_size,
}
dataset_file_name = os.path.join(dataset_path, dataset_file_name % train_or_test)
reader = tf.TFRecordReader
decoder = slim.tfexample_decoder.TFExampleDecoder(self.features, self.items)
return slim.dataset.Dataset(
data_sources=dataset_file_name,
reader=reader,
decoder=decoder,
num_samples=train_test_sizes[train_or_test],
items_to_descriptions=self.items_descriptions,
num_classes=FLAGS.num_classes-1,
labels_to_names=None)
# This function gets groundtruth bboxes & labels from dataset
# Inputs:
# dataset
# train_or_test: train/test
# Output:
# image, ground-truth bboxes, ground-truth labels, ground-truth difficult objects
def get_groundtruth_from_dataset(self, dataset, train_or_test):
# Dataset provider
with tf.name_scope(None, "get_groundtruth_from_dataset") as scope:
if train_or_test == 'test':
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset,
num_readers=FLAGS.test_num_readers,
common_queue_capacity=FLAGS.test_common_queue_capacity,
common_queue_min=FLAGS.test_batch_size,
shuffle=FLAGS.test_shuffle)
elif train_or_test == 'train':
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset,
num_readers= FLAGS.train_num_readers,
common_queue_capacity= FLAGS.train_common_queue_capacity,
common_queue_min= 10 * FLAGS.train_batch_size,
shuffle=FLAGS.train_shuffle)
# Get images, groundtruth bboxes & groundtruth labels from database
[image, gt_bboxes, gt_labels] = provider.get(['image','gt_bboxes','gt_labels'])
# Discard difficult objects
gt_difficult_objects = tf.zeros(tf.shape(gt_labels), dtype=tf.int64)
if FLAGS.test_discard_difficult_objects:
[gt_difficult_objects] = provider.get(['difficult_objects'])
return [image, gt_bboxes, gt_labels, gt_difficult_objects]
##########################################
    # Convert PascalVOC to TFRecords.
    # Process one image and its annotation file.
    # Inputs:
    #   directory: string, dataset root containing the JPEGImages/ and Annotations/ folders.
    #   name: string, image identifier without extension, e.g. '000001'.
    # Outputs:
    #   image_data: string, JPEG encoding of the RGB image.
    #   shape: [height, width, depth] of the image in pixels.
    #   bboxes, labels, labels_text, difficult, truncated: per-object annotations parsed from the XML.
def _process_image_PascalVOC(self, directory, name):
# Read the image file.
filename = directory + DIRECTORY_IMAGES + name + '.jpg'
        image_data = tf.gfile.FastGFile(filename, 'rb').read()
# Read the XML annotation file.
filename = os.path.join(directory, DIRECTORY_ANNOTATIONS, name + '.xml')
tree = ET.parse(filename)
root = tree.getroot()
# Image shape.
size = root.find('size')
shape = [int(size.find('height').text), int(size.find('width').text), int(size.find('depth').text)]
# Find annotations.
bboxes = []
labels = []
labels_text = []
difficult = []
truncated = []
for obj in root.findall('object'):
label = obj.find('name').text
labels.append(int(VOC_LABELS[label][0]))
labels_text.append(label.encode('ascii'))
            if obj.find('difficult') is not None:
difficult.append(int(obj.find('difficult').text))
else:
difficult.append(0)
            if obj.find('truncated') is not None:
truncated.append(int(obj.find('truncated').text))
else:
truncated.append(0)
bbox = obj.find('bndbox')
bboxes.append((float(bbox.find('ymin').text) / shape[0],
float(bbox.find('xmin').text) / shape[1],
float(bbox.find('ymax').text) / shape[0],
float(bbox.find('xmax').text) / shape[1]
))
return image_data, shape, bboxes, labels, labels_text, difficult, truncated
# Build an Example proto for an image example.
# Args:
# image_data: string, JPEG encoding of RGB image;
# labels: list of integers, identifier for the ground truth;
# labels_text: list of strings, human-readable labels;
    #     bboxes: list of bounding boxes; each box is a tuple of normalized floats (ymin, xmin, ymax, xmax);
    #     shape: 3 integers, image shape in pixels;
    #     difficult, truncated: lists of integer flags, one per object.
# Returns:
# Example proto
def _convert_to_example_PascalVOC(self, image_data, labels, labels_text, bboxes, shape, difficult, truncated):
xmin = []
ymin = []
xmax = []
ymax = []
for b in bboxes:
assert len(b) == 4
# pylint: disable=expression-not-assigned
[l.append(point) for l, point in zip([ymin, xmin, ymax, xmax], b)]
# pylint: enable=expression-not-assigned
image_format = b'JPEG'
example = tf.train.Example(features=tf.train.Features(feature={
'image/height': self.int64_feature(shape[0]),
'image/width': self.int64_feature(shape[1]),
'image/channels': self.int64_feature(shape[2]),
'image/shape': self.int64_feature(shape),
'image/object/bbox/xmin': self.float_feature(xmin),
'image/object/bbox/xmax': self.float_feature(xmax),
'image/object/bbox/ymin': self.float_feature(ymin),
'image/object/bbox/ymax': self.float_feature(ymax),
'image/object/bbox/label': self.int64_feature(labels),
'image/object/bbox/label_text': self.bytes_feature(labels_text),
'image/object/bbox/difficult': self.int64_feature(difficult),
'image/object/bbox/truncated': self.int64_feature(truncated),
'image/format': self.bytes_feature(image_format),
'image/encoded': self.bytes_feature(image_data)}))
return example
# Loads data from image and annotations files and add them to a TFRecord.
# Inputs:
# dataset_dir: Dataset directory;
# name: Image name to add to the TFRecord;
# tfrecord_writer: The TFRecord writer to use for writing.
def _add_to_tfrecord_PascalVOC(self, dataset_dir, name, tfrecord_writer):
image_data, shape, bboxes, labels, labels_text, difficult, truncated = self._process_image_PascalVOC(dataset_dir, name)
example = self._convert_to_example_PascalVOC(image_data, labels, labels_text, bboxes, shape, difficult, truncated)
tfrecord_writer.write(example.SerializeToString())
    def _get_output_filename_PascalVOC(self, output_dir, name, idx):
return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)
# Convert images to tfrecords
# Args:
# dataset_dir: The dataset directory where the dataset is stored.
# output_dir: Output directory.
def run_PascalVOC(self, dataset_dir, output_dir, name='voc_train', shuffling=False):
if not tf.gfile.Exists(dataset_dir):
tf.gfile.MakeDirs(dataset_dir)
# Dataset filenames, and shuffling.
path = os.path.join(dataset_dir, DIRECTORY_ANNOTATIONS)
filenames = sorted(os.listdir(path))
if shuffling:
random.seed(RANDOM_SEED)
random.shuffle(filenames)
# Process dataset files.
i = 0
fidx = 0
while i < len(filenames):
# Open new TFRecord file.
            tf_filename = self._get_output_filename_PascalVOC(output_dir, name, fidx)
with tf.python_io.TFRecordWriter(tf_filename) as tfrecord_writer:
j = 0
while i < len(filenames) and j < SAMPLES_PER_FILES:
sys.stdout.write('\r>> Converting image %d/%d' % (i+1, len(filenames)))
sys.stdout.flush()
filename = filenames[i]
img_name = filename[:-4]
self._add_to_tfrecord_PascalVOC(dataset_dir, img_name, tfrecord_writer)
i += 1
j += 1
fidx += 1
print('\n ImageDB to TF conversion finished. ')
# Wrapper for inserting int64 features into Example proto.
def int64_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
# Wrapper for inserting float features into Example proto.
def float_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(float_list=tf.train.FloatList(value=value))
# Wrapper for inserting bytes features into Example proto.
def bytes_feature(self, value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))
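
# Minimal usage sketch for the Dataset class above. It assumes the pascalvoc_2007 test
# split has already been converted to TFRecords under './tfrecords_test/' and that the
# FLAGS it reads come from the configs imports at the top of the module. The provider
# only builds symbolic tensors, so a session with running queue runners is needed to
# pull concrete values.
if __name__ == '__main__':
    pascal_data = Dataset()
    test_set = pascal_data.read_dataset_from_tfrecords('pascalvoc_2007', 'test', './tfrecords_test/')
    image, gt_bboxes, gt_labels, gt_difficult = pascal_data.get_groundtruth_from_dataset(test_set, 'test')
    with tf.Session() as sess:
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)
        img, boxes, labels = sess.run([image, gt_bboxes, gt_labels])
        print('image shape: %s, objects in image: %d' % (str(img.shape), labels.shape[0]))
        coord.request_stop()
        coord.join(threads)
    # To (re)build the TFRecords from a raw VOC tree one would instead call something like
    # the following (the VOCdevkit path is only an illustration):
    # pascal_data.run_PascalVOC('./VOCdevkit/VOC2007/', './tfrecords_test/', name='voc_2007_test')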
|
flexible
|
{
"blob_id": "c33d625ebd6a40551d2ce0393fd78619601ea7ae",
"index": 5834,
"step-1": "<mask token>\n\n\nclass Dataset(object):\n\n def __init__(self):\n self.items_descriptions = {'image':\n 'A color image of varying height and width.', 'shape':\n 'Shape of the image', 'object/bbox':\n 'A list of bounding boxes, one per each object.',\n 'object/label': 'A list of labels, one per each object.'}\n self.features = {'image/encoded': tf.FixedLenFeature((), tf.string,\n default_value=''), 'image/format': tf.FixedLenFeature((), tf.\n string, default_value='jpeg'), 'image/object/bbox/xmin': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymin': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/xmax': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymax': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/label': tf.\n VarLenFeature(dtype=tf.int64), 'image/object/bbox/difficult':\n tf.VarLenFeature(dtype=tf.int64)}\n self.items = {'image': slim.tfexample_decoder.Image('image/encoded',\n 'image/format'), 'gt_bboxes': slim.tfexample_decoder.\n BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],\n 'image/object/bbox/'), 'gt_labels': slim.tfexample_decoder.\n Tensor('image/object/bbox/label'), 'difficult_objects': slim.\n tfexample_decoder.Tensor('image/object/bbox/difficult')}\n\n def read_dataset_from_tfrecords(self, dataset_name, train_or_test,\n dataset_path):\n with tf.name_scope(None, 'read_dataset_from_tfrecords') as scope:\n if (dataset_name == 'pascalvoc_2007' or dataset_name ==\n 'pascalvoc_2012'):\n dataset = self.load_dataset(dataset_name, train_or_test,\n dataset_path)\n return dataset\n\n def load_dataset(self, dataset_name, train_or_test, dataset_path):\n dataset_file_name = dataset_name[6:] + '_%s_*.tfrecord'\n if dataset_name == 'pascalvoc_2007':\n train_test_sizes = {'train': FLAGS.pascalvoc_2007_train_size,\n 'test': FLAGS.pascalvoc_2007_test_size}\n elif dataset_name == 'pascalvoc_2012':\n train_test_sizes = {'train': FLAGS.pascalvoc_2012_train_size}\n dataset_file_name = os.path.join(dataset_path, dataset_file_name %\n train_or_test)\n reader = tf.TFRecordReader\n decoder = slim.tfexample_decoder.TFExampleDecoder(self.features,\n self.items)\n return slim.dataset.Dataset(data_sources=dataset_file_name, reader=\n reader, decoder=decoder, num_samples=train_test_sizes[\n train_or_test], items_to_descriptions=self.items_descriptions,\n num_classes=FLAGS.num_classes - 1, labels_to_names=None)\n\n def get_groundtruth_from_dataset(self, dataset, train_or_test):\n with tf.name_scope(None, 'get_groundtruth_from_dataset') as scope:\n if train_or_test == 'test':\n provider = slim.dataset_data_provider.DatasetDataProvider(\n dataset, num_readers=FLAGS.test_num_readers,\n common_queue_capacity=FLAGS.test_common_queue_capacity,\n common_queue_min=FLAGS.test_batch_size, shuffle=FLAGS.\n test_shuffle)\n elif train_or_test == 'train':\n provider = slim.dataset_data_provider.DatasetDataProvider(\n dataset, num_readers=FLAGS.train_num_readers,\n common_queue_capacity=FLAGS.train_common_queue_capacity,\n common_queue_min=10 * FLAGS.train_batch_size, shuffle=\n FLAGS.train_shuffle)\n [image, gt_bboxes, gt_labels] = provider.get(['image',\n 'gt_bboxes', 'gt_labels'])\n gt_difficult_objects = tf.zeros(tf.shape(gt_labels), dtype=tf.int64\n )\n if FLAGS.test_discard_difficult_objects:\n [gt_difficult_objects] = provider.get(['difficult_objects'])\n return [image, gt_bboxes, gt_labels, gt_difficult_objects]\n\n def _process_image_PascalVOC(self, directory, name):\n filename = directory + DIRECTORY_IMAGES + name + '.jpg'\n image_data = 
tf.gfile.FastGFile(filename, 'r').read()\n filename = os.path.join(directory, DIRECTORY_ANNOTATIONS, name + '.xml'\n )\n tree = ET.parse(filename)\n root = tree.getroot()\n size = root.find('size')\n shape = [int(size.find('height').text), int(size.find('width').text\n ), int(size.find('depth').text)]\n bboxes = []\n labels = []\n labels_text = []\n difficult = []\n truncated = []\n for obj in root.findall('object'):\n label = obj.find('name').text\n labels.append(int(VOC_LABELS[label][0]))\n labels_text.append(label.encode('ascii'))\n if obj.find('difficult'):\n difficult.append(int(obj.find('difficult').text))\n else:\n difficult.append(0)\n if obj.find('truncated'):\n truncated.append(int(obj.find('truncated').text))\n else:\n truncated.append(0)\n bbox = obj.find('bndbox')\n bboxes.append((float(bbox.find('ymin').text) / shape[0], float(\n bbox.find('xmin').text) / shape[1], float(bbox.find('ymax')\n .text) / shape[0], float(bbox.find('xmax').text) / shape[1]))\n return (image_data, shape, bboxes, labels, labels_text, difficult,\n truncated)\n\n def _convert_to_example_PascalVOC(self, image_data, labels, labels_text,\n bboxes, shape, difficult, truncated):\n xmin = []\n ymin = []\n xmax = []\n ymax = []\n for b in bboxes:\n assert len(b) == 4\n [l.append(point) for l, point in zip([ymin, xmin, ymax, xmax], b)]\n image_format = b'JPEG'\n example = tf.train.Example(features=tf.train.Features(feature={\n 'image/height': self.int64_feature(shape[0]), 'image/width':\n self.int64_feature(shape[1]), 'image/channels': self.\n int64_feature(shape[2]), 'image/shape': self.int64_feature(\n shape), 'image/object/bbox/xmin': self.float_feature(xmin),\n 'image/object/bbox/xmax': self.float_feature(xmax),\n 'image/object/bbox/ymin': self.float_feature(ymin),\n 'image/object/bbox/ymax': self.float_feature(ymax),\n 'image/object/bbox/label': self.int64_feature(labels),\n 'image/object/bbox/label_text': self.bytes_feature(labels_text),\n 'image/object/bbox/difficult': self.int64_feature(difficult),\n 'image/object/bbox/truncated': self.int64_feature(truncated),\n 'image/format': self.bytes_feature(image_format),\n 'image/encoded': self.bytes_feature(image_data)}))\n return example\n <mask token>\n\n def _get_output_filename_PascalVOC(output_dir, name, idx):\n return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)\n <mask token>\n\n def int64_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(int64_list=tf.train.Int64List(value=value))\n\n def float_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(float_list=tf.train.FloatList(value=value))\n\n def bytes_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))\n",
"step-2": "<mask token>\n\n\nclass Dataset(object):\n\n def __init__(self):\n self.items_descriptions = {'image':\n 'A color image of varying height and width.', 'shape':\n 'Shape of the image', 'object/bbox':\n 'A list of bounding boxes, one per each object.',\n 'object/label': 'A list of labels, one per each object.'}\n self.features = {'image/encoded': tf.FixedLenFeature((), tf.string,\n default_value=''), 'image/format': tf.FixedLenFeature((), tf.\n string, default_value='jpeg'), 'image/object/bbox/xmin': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymin': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/xmax': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymax': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/label': tf.\n VarLenFeature(dtype=tf.int64), 'image/object/bbox/difficult':\n tf.VarLenFeature(dtype=tf.int64)}\n self.items = {'image': slim.tfexample_decoder.Image('image/encoded',\n 'image/format'), 'gt_bboxes': slim.tfexample_decoder.\n BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],\n 'image/object/bbox/'), 'gt_labels': slim.tfexample_decoder.\n Tensor('image/object/bbox/label'), 'difficult_objects': slim.\n tfexample_decoder.Tensor('image/object/bbox/difficult')}\n\n def read_dataset_from_tfrecords(self, dataset_name, train_or_test,\n dataset_path):\n with tf.name_scope(None, 'read_dataset_from_tfrecords') as scope:\n if (dataset_name == 'pascalvoc_2007' or dataset_name ==\n 'pascalvoc_2012'):\n dataset = self.load_dataset(dataset_name, train_or_test,\n dataset_path)\n return dataset\n\n def load_dataset(self, dataset_name, train_or_test, dataset_path):\n dataset_file_name = dataset_name[6:] + '_%s_*.tfrecord'\n if dataset_name == 'pascalvoc_2007':\n train_test_sizes = {'train': FLAGS.pascalvoc_2007_train_size,\n 'test': FLAGS.pascalvoc_2007_test_size}\n elif dataset_name == 'pascalvoc_2012':\n train_test_sizes = {'train': FLAGS.pascalvoc_2012_train_size}\n dataset_file_name = os.path.join(dataset_path, dataset_file_name %\n train_or_test)\n reader = tf.TFRecordReader\n decoder = slim.tfexample_decoder.TFExampleDecoder(self.features,\n self.items)\n return slim.dataset.Dataset(data_sources=dataset_file_name, reader=\n reader, decoder=decoder, num_samples=train_test_sizes[\n train_or_test], items_to_descriptions=self.items_descriptions,\n num_classes=FLAGS.num_classes - 1, labels_to_names=None)\n\n def get_groundtruth_from_dataset(self, dataset, train_or_test):\n with tf.name_scope(None, 'get_groundtruth_from_dataset') as scope:\n if train_or_test == 'test':\n provider = slim.dataset_data_provider.DatasetDataProvider(\n dataset, num_readers=FLAGS.test_num_readers,\n common_queue_capacity=FLAGS.test_common_queue_capacity,\n common_queue_min=FLAGS.test_batch_size, shuffle=FLAGS.\n test_shuffle)\n elif train_or_test == 'train':\n provider = slim.dataset_data_provider.DatasetDataProvider(\n dataset, num_readers=FLAGS.train_num_readers,\n common_queue_capacity=FLAGS.train_common_queue_capacity,\n common_queue_min=10 * FLAGS.train_batch_size, shuffle=\n FLAGS.train_shuffle)\n [image, gt_bboxes, gt_labels] = provider.get(['image',\n 'gt_bboxes', 'gt_labels'])\n gt_difficult_objects = tf.zeros(tf.shape(gt_labels), dtype=tf.int64\n )\n if FLAGS.test_discard_difficult_objects:\n [gt_difficult_objects] = provider.get(['difficult_objects'])\n return [image, gt_bboxes, gt_labels, gt_difficult_objects]\n\n def _process_image_PascalVOC(self, directory, name):\n filename = directory + DIRECTORY_IMAGES + name + '.jpg'\n image_data = 
tf.gfile.FastGFile(filename, 'r').read()\n filename = os.path.join(directory, DIRECTORY_ANNOTATIONS, name + '.xml'\n )\n tree = ET.parse(filename)\n root = tree.getroot()\n size = root.find('size')\n shape = [int(size.find('height').text), int(size.find('width').text\n ), int(size.find('depth').text)]\n bboxes = []\n labels = []\n labels_text = []\n difficult = []\n truncated = []\n for obj in root.findall('object'):\n label = obj.find('name').text\n labels.append(int(VOC_LABELS[label][0]))\n labels_text.append(label.encode('ascii'))\n if obj.find('difficult'):\n difficult.append(int(obj.find('difficult').text))\n else:\n difficult.append(0)\n if obj.find('truncated'):\n truncated.append(int(obj.find('truncated').text))\n else:\n truncated.append(0)\n bbox = obj.find('bndbox')\n bboxes.append((float(bbox.find('ymin').text) / shape[0], float(\n bbox.find('xmin').text) / shape[1], float(bbox.find('ymax')\n .text) / shape[0], float(bbox.find('xmax').text) / shape[1]))\n return (image_data, shape, bboxes, labels, labels_text, difficult,\n truncated)\n\n def _convert_to_example_PascalVOC(self, image_data, labels, labels_text,\n bboxes, shape, difficult, truncated):\n xmin = []\n ymin = []\n xmax = []\n ymax = []\n for b in bboxes:\n assert len(b) == 4\n [l.append(point) for l, point in zip([ymin, xmin, ymax, xmax], b)]\n image_format = b'JPEG'\n example = tf.train.Example(features=tf.train.Features(feature={\n 'image/height': self.int64_feature(shape[0]), 'image/width':\n self.int64_feature(shape[1]), 'image/channels': self.\n int64_feature(shape[2]), 'image/shape': self.int64_feature(\n shape), 'image/object/bbox/xmin': self.float_feature(xmin),\n 'image/object/bbox/xmax': self.float_feature(xmax),\n 'image/object/bbox/ymin': self.float_feature(ymin),\n 'image/object/bbox/ymax': self.float_feature(ymax),\n 'image/object/bbox/label': self.int64_feature(labels),\n 'image/object/bbox/label_text': self.bytes_feature(labels_text),\n 'image/object/bbox/difficult': self.int64_feature(difficult),\n 'image/object/bbox/truncated': self.int64_feature(truncated),\n 'image/format': self.bytes_feature(image_format),\n 'image/encoded': self.bytes_feature(image_data)}))\n return example\n\n def _add_to_tfrecord_PascalVOC(self, dataset_dir, name, tfrecord_writer):\n (image_data, shape, bboxes, labels, labels_text, difficult, truncated\n ) = self._process_image_PascalVOC(dataset_dir, name)\n example = self._convert_to_example_PascalVOC(image_data, labels,\n labels_text, bboxes, shape, difficult, truncated)\n tfrecord_writer.write(example.SerializeToString())\n\n def _get_output_filename_PascalVOC(output_dir, name, idx):\n return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)\n <mask token>\n\n def int64_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(int64_list=tf.train.Int64List(value=value))\n\n def float_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(float_list=tf.train.FloatList(value=value))\n\n def bytes_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))\n",
"step-3": "<mask token>\n\n\nclass Dataset(object):\n\n def __init__(self):\n self.items_descriptions = {'image':\n 'A color image of varying height and width.', 'shape':\n 'Shape of the image', 'object/bbox':\n 'A list of bounding boxes, one per each object.',\n 'object/label': 'A list of labels, one per each object.'}\n self.features = {'image/encoded': tf.FixedLenFeature((), tf.string,\n default_value=''), 'image/format': tf.FixedLenFeature((), tf.\n string, default_value='jpeg'), 'image/object/bbox/xmin': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymin': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/xmax': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymax': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/label': tf.\n VarLenFeature(dtype=tf.int64), 'image/object/bbox/difficult':\n tf.VarLenFeature(dtype=tf.int64)}\n self.items = {'image': slim.tfexample_decoder.Image('image/encoded',\n 'image/format'), 'gt_bboxes': slim.tfexample_decoder.\n BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],\n 'image/object/bbox/'), 'gt_labels': slim.tfexample_decoder.\n Tensor('image/object/bbox/label'), 'difficult_objects': slim.\n tfexample_decoder.Tensor('image/object/bbox/difficult')}\n\n def read_dataset_from_tfrecords(self, dataset_name, train_or_test,\n dataset_path):\n with tf.name_scope(None, 'read_dataset_from_tfrecords') as scope:\n if (dataset_name == 'pascalvoc_2007' or dataset_name ==\n 'pascalvoc_2012'):\n dataset = self.load_dataset(dataset_name, train_or_test,\n dataset_path)\n return dataset\n\n def load_dataset(self, dataset_name, train_or_test, dataset_path):\n dataset_file_name = dataset_name[6:] + '_%s_*.tfrecord'\n if dataset_name == 'pascalvoc_2007':\n train_test_sizes = {'train': FLAGS.pascalvoc_2007_train_size,\n 'test': FLAGS.pascalvoc_2007_test_size}\n elif dataset_name == 'pascalvoc_2012':\n train_test_sizes = {'train': FLAGS.pascalvoc_2012_train_size}\n dataset_file_name = os.path.join(dataset_path, dataset_file_name %\n train_or_test)\n reader = tf.TFRecordReader\n decoder = slim.tfexample_decoder.TFExampleDecoder(self.features,\n self.items)\n return slim.dataset.Dataset(data_sources=dataset_file_name, reader=\n reader, decoder=decoder, num_samples=train_test_sizes[\n train_or_test], items_to_descriptions=self.items_descriptions,\n num_classes=FLAGS.num_classes - 1, labels_to_names=None)\n\n def get_groundtruth_from_dataset(self, dataset, train_or_test):\n with tf.name_scope(None, 'get_groundtruth_from_dataset') as scope:\n if train_or_test == 'test':\n provider = slim.dataset_data_provider.DatasetDataProvider(\n dataset, num_readers=FLAGS.test_num_readers,\n common_queue_capacity=FLAGS.test_common_queue_capacity,\n common_queue_min=FLAGS.test_batch_size, shuffle=FLAGS.\n test_shuffle)\n elif train_or_test == 'train':\n provider = slim.dataset_data_provider.DatasetDataProvider(\n dataset, num_readers=FLAGS.train_num_readers,\n common_queue_capacity=FLAGS.train_common_queue_capacity,\n common_queue_min=10 * FLAGS.train_batch_size, shuffle=\n FLAGS.train_shuffle)\n [image, gt_bboxes, gt_labels] = provider.get(['image',\n 'gt_bboxes', 'gt_labels'])\n gt_difficult_objects = tf.zeros(tf.shape(gt_labels), dtype=tf.int64\n )\n if FLAGS.test_discard_difficult_objects:\n [gt_difficult_objects] = provider.get(['difficult_objects'])\n return [image, gt_bboxes, gt_labels, gt_difficult_objects]\n\n def _process_image_PascalVOC(self, directory, name):\n filename = directory + DIRECTORY_IMAGES + name + '.jpg'\n image_data = 
tf.gfile.FastGFile(filename, 'r').read()\n filename = os.path.join(directory, DIRECTORY_ANNOTATIONS, name + '.xml'\n )\n tree = ET.parse(filename)\n root = tree.getroot()\n size = root.find('size')\n shape = [int(size.find('height').text), int(size.find('width').text\n ), int(size.find('depth').text)]\n bboxes = []\n labels = []\n labels_text = []\n difficult = []\n truncated = []\n for obj in root.findall('object'):\n label = obj.find('name').text\n labels.append(int(VOC_LABELS[label][0]))\n labels_text.append(label.encode('ascii'))\n if obj.find('difficult'):\n difficult.append(int(obj.find('difficult').text))\n else:\n difficult.append(0)\n if obj.find('truncated'):\n truncated.append(int(obj.find('truncated').text))\n else:\n truncated.append(0)\n bbox = obj.find('bndbox')\n bboxes.append((float(bbox.find('ymin').text) / shape[0], float(\n bbox.find('xmin').text) / shape[1], float(bbox.find('ymax')\n .text) / shape[0], float(bbox.find('xmax').text) / shape[1]))\n return (image_data, shape, bboxes, labels, labels_text, difficult,\n truncated)\n\n def _convert_to_example_PascalVOC(self, image_data, labels, labels_text,\n bboxes, shape, difficult, truncated):\n xmin = []\n ymin = []\n xmax = []\n ymax = []\n for b in bboxes:\n assert len(b) == 4\n [l.append(point) for l, point in zip([ymin, xmin, ymax, xmax], b)]\n image_format = b'JPEG'\n example = tf.train.Example(features=tf.train.Features(feature={\n 'image/height': self.int64_feature(shape[0]), 'image/width':\n self.int64_feature(shape[1]), 'image/channels': self.\n int64_feature(shape[2]), 'image/shape': self.int64_feature(\n shape), 'image/object/bbox/xmin': self.float_feature(xmin),\n 'image/object/bbox/xmax': self.float_feature(xmax),\n 'image/object/bbox/ymin': self.float_feature(ymin),\n 'image/object/bbox/ymax': self.float_feature(ymax),\n 'image/object/bbox/label': self.int64_feature(labels),\n 'image/object/bbox/label_text': self.bytes_feature(labels_text),\n 'image/object/bbox/difficult': self.int64_feature(difficult),\n 'image/object/bbox/truncated': self.int64_feature(truncated),\n 'image/format': self.bytes_feature(image_format),\n 'image/encoded': self.bytes_feature(image_data)}))\n return example\n\n def _add_to_tfrecord_PascalVOC(self, dataset_dir, name, tfrecord_writer):\n (image_data, shape, bboxes, labels, labels_text, difficult, truncated\n ) = self._process_image_PascalVOC(dataset_dir, name)\n example = self._convert_to_example_PascalVOC(image_data, labels,\n labels_text, bboxes, shape, difficult, truncated)\n tfrecord_writer.write(example.SerializeToString())\n\n def _get_output_filename_PascalVOC(output_dir, name, idx):\n return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)\n\n def run_PascalVOC(self, dataset_dir, output_dir, name='voc_train',\n shuffling=False):\n if not tf.gfile.Exists(dataset_dir):\n tf.gfile.MakeDirs(dataset_dir)\n path = os.path.join(dataset_dir, DIRECTORY_ANNOTATIONS)\n filenames = sorted(os.listdir(path))\n if shuffling:\n random.seed(RANDOM_SEED)\n random.shuffle(filenames)\n i = 0\n fidx = 0\n while i < len(filenames):\n tf_filename = self._get_output_filename(output_dir, name, fidx)\n with tf.python_io.TFRecordWriter(tf_filename) as tfrecord_writer:\n j = 0\n while i < len(filenames) and j < SAMPLES_PER_FILES:\n sys.stdout.write('\\r>> Converting image %d/%d' % (i + 1,\n len(filenames)))\n sys.stdout.flush()\n filename = filenames[i]\n img_name = filename[:-4]\n self._add_to_tfrecord_PascalVOC(dataset_dir, img_name,\n tfrecord_writer)\n i += 1\n j += 1\n fidx += 1\n print('\\n 
ImageDB to TF conversion finished. ')\n\n def int64_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(int64_list=tf.train.Int64List(value=value))\n\n def float_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(float_list=tf.train.FloatList(value=value))\n\n def bytes_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))\n",
"step-4": "<mask token>\nDIRECTORY_ANNOTATIONS = 'Annotations/'\nDIRECTORY_IMAGES = 'JPEGImages/'\nRANDOM_SEED = 4242\nSAMPLES_PER_FILES = 200\nslim = tf.contrib.slim\n\n\nclass Dataset(object):\n\n def __init__(self):\n self.items_descriptions = {'image':\n 'A color image of varying height and width.', 'shape':\n 'Shape of the image', 'object/bbox':\n 'A list of bounding boxes, one per each object.',\n 'object/label': 'A list of labels, one per each object.'}\n self.features = {'image/encoded': tf.FixedLenFeature((), tf.string,\n default_value=''), 'image/format': tf.FixedLenFeature((), tf.\n string, default_value='jpeg'), 'image/object/bbox/xmin': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymin': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/xmax': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymax': tf.\n VarLenFeature(dtype=tf.float32), 'image/object/bbox/label': tf.\n VarLenFeature(dtype=tf.int64), 'image/object/bbox/difficult':\n tf.VarLenFeature(dtype=tf.int64)}\n self.items = {'image': slim.tfexample_decoder.Image('image/encoded',\n 'image/format'), 'gt_bboxes': slim.tfexample_decoder.\n BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],\n 'image/object/bbox/'), 'gt_labels': slim.tfexample_decoder.\n Tensor('image/object/bbox/label'), 'difficult_objects': slim.\n tfexample_decoder.Tensor('image/object/bbox/difficult')}\n\n def read_dataset_from_tfrecords(self, dataset_name, train_or_test,\n dataset_path):\n with tf.name_scope(None, 'read_dataset_from_tfrecords') as scope:\n if (dataset_name == 'pascalvoc_2007' or dataset_name ==\n 'pascalvoc_2012'):\n dataset = self.load_dataset(dataset_name, train_or_test,\n dataset_path)\n return dataset\n\n def load_dataset(self, dataset_name, train_or_test, dataset_path):\n dataset_file_name = dataset_name[6:] + '_%s_*.tfrecord'\n if dataset_name == 'pascalvoc_2007':\n train_test_sizes = {'train': FLAGS.pascalvoc_2007_train_size,\n 'test': FLAGS.pascalvoc_2007_test_size}\n elif dataset_name == 'pascalvoc_2012':\n train_test_sizes = {'train': FLAGS.pascalvoc_2012_train_size}\n dataset_file_name = os.path.join(dataset_path, dataset_file_name %\n train_or_test)\n reader = tf.TFRecordReader\n decoder = slim.tfexample_decoder.TFExampleDecoder(self.features,\n self.items)\n return slim.dataset.Dataset(data_sources=dataset_file_name, reader=\n reader, decoder=decoder, num_samples=train_test_sizes[\n train_or_test], items_to_descriptions=self.items_descriptions,\n num_classes=FLAGS.num_classes - 1, labels_to_names=None)\n\n def get_groundtruth_from_dataset(self, dataset, train_or_test):\n with tf.name_scope(None, 'get_groundtruth_from_dataset') as scope:\n if train_or_test == 'test':\n provider = slim.dataset_data_provider.DatasetDataProvider(\n dataset, num_readers=FLAGS.test_num_readers,\n common_queue_capacity=FLAGS.test_common_queue_capacity,\n common_queue_min=FLAGS.test_batch_size, shuffle=FLAGS.\n test_shuffle)\n elif train_or_test == 'train':\n provider = slim.dataset_data_provider.DatasetDataProvider(\n dataset, num_readers=FLAGS.train_num_readers,\n common_queue_capacity=FLAGS.train_common_queue_capacity,\n common_queue_min=10 * FLAGS.train_batch_size, shuffle=\n FLAGS.train_shuffle)\n [image, gt_bboxes, gt_labels] = provider.get(['image',\n 'gt_bboxes', 'gt_labels'])\n gt_difficult_objects = tf.zeros(tf.shape(gt_labels), dtype=tf.int64\n )\n if FLAGS.test_discard_difficult_objects:\n [gt_difficult_objects] = provider.get(['difficult_objects'])\n return [image, gt_bboxes, gt_labels, gt_difficult_objects]\n\n 
def _process_image_PascalVOC(self, directory, name):\n filename = directory + DIRECTORY_IMAGES + name + '.jpg'\n image_data = tf.gfile.FastGFile(filename, 'r').read()\n filename = os.path.join(directory, DIRECTORY_ANNOTATIONS, name + '.xml'\n )\n tree = ET.parse(filename)\n root = tree.getroot()\n size = root.find('size')\n shape = [int(size.find('height').text), int(size.find('width').text\n ), int(size.find('depth').text)]\n bboxes = []\n labels = []\n labels_text = []\n difficult = []\n truncated = []\n for obj in root.findall('object'):\n label = obj.find('name').text\n labels.append(int(VOC_LABELS[label][0]))\n labels_text.append(label.encode('ascii'))\n if obj.find('difficult'):\n difficult.append(int(obj.find('difficult').text))\n else:\n difficult.append(0)\n if obj.find('truncated'):\n truncated.append(int(obj.find('truncated').text))\n else:\n truncated.append(0)\n bbox = obj.find('bndbox')\n bboxes.append((float(bbox.find('ymin').text) / shape[0], float(\n bbox.find('xmin').text) / shape[1], float(bbox.find('ymax')\n .text) / shape[0], float(bbox.find('xmax').text) / shape[1]))\n return (image_data, shape, bboxes, labels, labels_text, difficult,\n truncated)\n\n def _convert_to_example_PascalVOC(self, image_data, labels, labels_text,\n bboxes, shape, difficult, truncated):\n xmin = []\n ymin = []\n xmax = []\n ymax = []\n for b in bboxes:\n assert len(b) == 4\n [l.append(point) for l, point in zip([ymin, xmin, ymax, xmax], b)]\n image_format = b'JPEG'\n example = tf.train.Example(features=tf.train.Features(feature={\n 'image/height': self.int64_feature(shape[0]), 'image/width':\n self.int64_feature(shape[1]), 'image/channels': self.\n int64_feature(shape[2]), 'image/shape': self.int64_feature(\n shape), 'image/object/bbox/xmin': self.float_feature(xmin),\n 'image/object/bbox/xmax': self.float_feature(xmax),\n 'image/object/bbox/ymin': self.float_feature(ymin),\n 'image/object/bbox/ymax': self.float_feature(ymax),\n 'image/object/bbox/label': self.int64_feature(labels),\n 'image/object/bbox/label_text': self.bytes_feature(labels_text),\n 'image/object/bbox/difficult': self.int64_feature(difficult),\n 'image/object/bbox/truncated': self.int64_feature(truncated),\n 'image/format': self.bytes_feature(image_format),\n 'image/encoded': self.bytes_feature(image_data)}))\n return example\n\n def _add_to_tfrecord_PascalVOC(self, dataset_dir, name, tfrecord_writer):\n (image_data, shape, bboxes, labels, labels_text, difficult, truncated\n ) = self._process_image_PascalVOC(dataset_dir, name)\n example = self._convert_to_example_PascalVOC(image_data, labels,\n labels_text, bboxes, shape, difficult, truncated)\n tfrecord_writer.write(example.SerializeToString())\n\n def _get_output_filename_PascalVOC(output_dir, name, idx):\n return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)\n\n def run_PascalVOC(self, dataset_dir, output_dir, name='voc_train',\n shuffling=False):\n if not tf.gfile.Exists(dataset_dir):\n tf.gfile.MakeDirs(dataset_dir)\n path = os.path.join(dataset_dir, DIRECTORY_ANNOTATIONS)\n filenames = sorted(os.listdir(path))\n if shuffling:\n random.seed(RANDOM_SEED)\n random.shuffle(filenames)\n i = 0\n fidx = 0\n while i < len(filenames):\n tf_filename = self._get_output_filename(output_dir, name, fidx)\n with tf.python_io.TFRecordWriter(tf_filename) as tfrecord_writer:\n j = 0\n while i < len(filenames) and j < SAMPLES_PER_FILES:\n sys.stdout.write('\\r>> Converting image %d/%d' % (i + 1,\n len(filenames)))\n sys.stdout.flush()\n filename = filenames[i]\n img_name = 
filename[:-4]\n self._add_to_tfrecord_PascalVOC(dataset_dir, img_name,\n tfrecord_writer)\n i += 1\n j += 1\n fidx += 1\n print('\\n ImageDB to TF conversion finished. ')\n\n def int64_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(int64_list=tf.train.Int64List(value=value))\n\n def float_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(float_list=tf.train.FloatList(value=value))\n\n def bytes_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))\n",
"step-5": "\n# This module is used to load pascalvoc datasets (2007 or 2012)\nimport os\nimport tensorflow as tf\nfrom configs.config_common import *\nfrom configs.config_train import *\nfrom configs.config_test import *\nimport sys\nimport random\nimport numpy as np\nimport xml.etree.ElementTree as ET\n\n# Original dataset organisation.\nDIRECTORY_ANNOTATIONS = 'Annotations/'\nDIRECTORY_IMAGES = 'JPEGImages/'\n\n# TFRecords convertion parameters.\nRANDOM_SEED = 4242\nSAMPLES_PER_FILES = 200\n\nslim = tf.contrib.slim\n\n\n\nclass Dataset(object):\n\n def __init__(self):\n # Descriptions of the image items\n self.items_descriptions = {\n 'image': 'A color image of varying height and width.',\n 'shape': 'Shape of the image',\n 'object/bbox': 'A list of bounding boxes, one per each object.',\n 'object/label': 'A list of labels, one per each object.',\n }\n # Features of Pascal VOC TFRecords.\n self.features = {\n 'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),\n 'image/format': tf.FixedLenFeature((), tf.string, default_value='jpeg'),\n 'image/object/bbox/xmin': tf.VarLenFeature(dtype=tf.float32),\n 'image/object/bbox/ymin': tf.VarLenFeature(dtype=tf.float32),\n 'image/object/bbox/xmax': tf.VarLenFeature(dtype=tf.float32),\n 'image/object/bbox/ymax': tf.VarLenFeature(dtype=tf.float32),\n 'image/object/bbox/label': tf.VarLenFeature(dtype=tf.int64),\n 'image/object/bbox/difficult': tf.VarLenFeature(dtype=tf.int64),\n }\n # Items in Pascal VOC TFRecords.\n self.items = {\n 'image': slim.tfexample_decoder.Image('image/encoded', 'image/format'),\n 'gt_bboxes': slim.tfexample_decoder.BoundingBox(['ymin','xmin','ymax','xmax'], 'image/object/bbox/'),\n 'gt_labels': slim.tfexample_decoder.Tensor('image/object/bbox/label'),\n 'difficult_objects': slim.tfexample_decoder.Tensor('image/object/bbox/difficult'),\n }\n\n\n\n # This function reads dataset from tfrecords\n # Inputs:\n # datase_name: pascalvoc_2007\n # train_or_test: test\n # dataset_path: './tfrecords_test/'\n # Outputs:\n # loaded dataset\n def read_dataset_from_tfrecords(self, dataset_name, train_or_test, dataset_path):\n with tf.name_scope(None, \"read_dataset_from_tfrecords\") as scope:\n if dataset_name == 'pascalvoc_2007' or dataset_name == 'pascalvoc_2012':\n dataset = self.load_dataset(dataset_name, train_or_test, dataset_path)\n return dataset\n\n\n\n # This function is used to load pascalvoc2007 or psaclvoc2012 datasets\n # Inputs:\n # dataset_name: pascalvoc_2007\n # train_or_test: test\n # dataset_path: './tfrecords_test/'\n # Output:\n # loaded dataset \n def load_dataset(self, dataset_name, train_or_test, dataset_path):\n dataset_file_name = dataset_name[6:] + '_%s_*.tfrecord'\n if dataset_name == 'pascalvoc_2007':\n train_test_sizes = {\n 'train': FLAGS.pascalvoc_2007_train_size,\n 'test': FLAGS.pascalvoc_2007_test_size,\n }\n elif dataset_name == 'pascalvoc_2012':\n train_test_sizes = {\n 'train': FLAGS.pascalvoc_2012_train_size, \n } \n dataset_file_name = os.path.join(dataset_path, dataset_file_name % train_or_test)\n reader = tf.TFRecordReader\n decoder = slim.tfexample_decoder.TFExampleDecoder(self.features, self.items)\n return slim.dataset.Dataset(\n data_sources=dataset_file_name,\n reader=reader,\n decoder=decoder,\n num_samples=train_test_sizes[train_or_test],\n items_to_descriptions=self.items_descriptions,\n num_classes=FLAGS.num_classes-1,\n labels_to_names=None)\n\n\n\n # This function gets groundtruth bboxes & labels from dataset\n # Inputs:\n # dataset\n # train_or_test: train/test\n # 
Output:\n # image, ground-truth bboxes, ground-truth labels, ground-truth difficult objects\n def get_groundtruth_from_dataset(self, dataset, train_or_test):\n # Dataset provider\n with tf.name_scope(None, \"get_groundtruth_from_dataset\") as scope:\n if train_or_test == 'test':\n provider = slim.dataset_data_provider.DatasetDataProvider(\n dataset,\n num_readers=FLAGS.test_num_readers,\n common_queue_capacity=FLAGS.test_common_queue_capacity,\n common_queue_min=FLAGS.test_batch_size,\n shuffle=FLAGS.test_shuffle)\n elif train_or_test == 'train':\n provider = slim.dataset_data_provider.DatasetDataProvider(\n dataset,\n num_readers= FLAGS.train_num_readers,\n common_queue_capacity= FLAGS.train_common_queue_capacity,\n common_queue_min= 10 * FLAGS.train_batch_size,\n shuffle=FLAGS.train_shuffle)\n # Get images, groundtruth bboxes & groundtruth labels from database\n [image, gt_bboxes, gt_labels] = provider.get(['image','gt_bboxes','gt_labels'])\n # Discard difficult objects\n gt_difficult_objects = tf.zeros(tf.shape(gt_labels), dtype=tf.int64)\n if FLAGS.test_discard_difficult_objects:\n [gt_difficult_objects] = provider.get(['difficult_objects'])\n return [image, gt_bboxes, gt_labels, gt_difficult_objects]\n\n\n\n\n\n ##########################################\n # Convert PascalVOC to TF recorsd\n # Process a image and annotation file.\n # Inputs:\n # filename: string, path to an image file e.g., '/path/to/example.JPG'.\n # coder: instance of ImageCoder to provide TensorFlow image coding utils.\n # Outputs:\n # image_buffer: string, JPEG encoding of RGB image.\n # height: integer, image height in pixels.\n # width: integer, image width in pixels.\n def _process_image_PascalVOC(self, directory, name):\n\n # Read the image file.\n filename = directory + DIRECTORY_IMAGES + name + '.jpg'\n image_data = tf.gfile.FastGFile(filename, 'r').read()\n\n # Read the XML annotation file.\n filename = os.path.join(directory, DIRECTORY_ANNOTATIONS, name + '.xml')\n tree = ET.parse(filename)\n root = tree.getroot()\n\n # Image shape.\n size = root.find('size')\n shape = [int(size.find('height').text), int(size.find('width').text), int(size.find('depth').text)]\n # Find annotations.\n bboxes = []\n labels = []\n labels_text = []\n difficult = []\n truncated = []\n for obj in root.findall('object'):\n label = obj.find('name').text\n labels.append(int(VOC_LABELS[label][0]))\n labels_text.append(label.encode('ascii'))\n\n if obj.find('difficult'):\n difficult.append(int(obj.find('difficult').text))\n else:\n difficult.append(0)\n if obj.find('truncated'):\n truncated.append(int(obj.find('truncated').text))\n else:\n truncated.append(0)\n\n bbox = obj.find('bndbox')\n bboxes.append((float(bbox.find('ymin').text) / shape[0],\n float(bbox.find('xmin').text) / shape[1],\n float(bbox.find('ymax').text) / shape[0],\n float(bbox.find('xmax').text) / shape[1]\n ))\n return image_data, shape, bboxes, labels, labels_text, difficult, truncated\n\n\n\n\n # Build an Example proto for an image example.\n # Args:\n # image_data: string, JPEG encoding of RGB image;\n # labels: list of integers, identifier for the ground truth;\n # labels_text: list of strings, human-readable labels;\n # bboxes: list of bounding boxes; each box is a list of integers;\n # shape: 3 integers, image shapes in pixels.\n # Returns:\n # Example proto\n def _convert_to_example_PascalVOC(self, image_data, labels, labels_text, bboxes, shape, difficult, truncated):\n\n xmin = []\n ymin = []\n xmax = []\n ymax = []\n for b in bboxes:\n assert len(b) == 4\n 
# pylint: disable=expression-not-assigned\n [l.append(point) for l, point in zip([ymin, xmin, ymax, xmax], b)]\n # pylint: enable=expression-not-assigned\n\n image_format = b'JPEG'\n example = tf.train.Example(features=tf.train.Features(feature={\n 'image/height': self.int64_feature(shape[0]),\n 'image/width': self.int64_feature(shape[1]),\n 'image/channels': self.int64_feature(shape[2]),\n 'image/shape': self.int64_feature(shape),\n 'image/object/bbox/xmin': self.float_feature(xmin),\n 'image/object/bbox/xmax': self.float_feature(xmax),\n 'image/object/bbox/ymin': self.float_feature(ymin),\n 'image/object/bbox/ymax': self.float_feature(ymax),\n 'image/object/bbox/label': self.int64_feature(labels),\n 'image/object/bbox/label_text': self.bytes_feature(labels_text),\n 'image/object/bbox/difficult': self.int64_feature(difficult),\n 'image/object/bbox/truncated': self.int64_feature(truncated),\n 'image/format': self.bytes_feature(image_format),\n 'image/encoded': self.bytes_feature(image_data)}))\n return example\n\n\n\n # Loads data from image and annotations files and add them to a TFRecord.\n # Inputs:\n # dataset_dir: Dataset directory;\n # name: Image name to add to the TFRecord;\n # tfrecord_writer: The TFRecord writer to use for writing.\n def _add_to_tfrecord_PascalVOC(self, dataset_dir, name, tfrecord_writer):\n\n image_data, shape, bboxes, labels, labels_text, difficult, truncated = self._process_image_PascalVOC(dataset_dir, name)\n example = self._convert_to_example_PascalVOC(image_data, labels, labels_text, bboxes, shape, difficult, truncated)\n tfrecord_writer.write(example.SerializeToString())\n\n\n\n def _get_output_filename_PascalVOC(output_dir, name, idx):\n return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)\n\n\n\n # Convert images to tfrecords\n # Args:\n # dataset_dir: The dataset directory where the dataset is stored.\n # output_dir: Output directory.\n def run_PascalVOC(self, dataset_dir, output_dir, name='voc_train', shuffling=False):\n\n if not tf.gfile.Exists(dataset_dir):\n tf.gfile.MakeDirs(dataset_dir)\n # Dataset filenames, and shuffling.\n path = os.path.join(dataset_dir, DIRECTORY_ANNOTATIONS)\n filenames = sorted(os.listdir(path))\n if shuffling:\n random.seed(RANDOM_SEED)\n random.shuffle(filenames)\n # Process dataset files.\n i = 0\n fidx = 0\n while i < len(filenames):\n # Open new TFRecord file.\n tf_filename = self._get_output_filename(output_dir, name, fidx)\n with tf.python_io.TFRecordWriter(tf_filename) as tfrecord_writer:\n j = 0\n while i < len(filenames) and j < SAMPLES_PER_FILES:\n sys.stdout.write('\\r>> Converting image %d/%d' % (i+1, len(filenames)))\n sys.stdout.flush()\n\n filename = filenames[i]\n img_name = filename[:-4]\n self._add_to_tfrecord_PascalVOC(dataset_dir, img_name, tfrecord_writer)\n i += 1\n j += 1\n fidx += 1\n print('\\n ImageDB to TF conversion finished. 
')\n\n\n\n # Wrapper for inserting int64 features into Example proto.\n def int64_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(int64_list=tf.train.Int64List(value=value))\n\n\n # Wrapper for inserting float features into Example proto.\n def float_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(float_list=tf.train.FloatList(value=value))\n\n\n # Wrapper for inserting bytes features into Example proto.\n def bytes_feature(self, value):\n if not isinstance(value, list):\n value = [value]\n return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))\n\n\n\n\n\n\n \n\n\n",
"step-ids": [
11,
12,
13,
14,
16
]
}
|
[
11,
12,
13,
14,
16
] |
from .line_detection_research import score_pixel_v3p2
|
normal
|
{
"blob_id": "305554fc86ddc116677b6d95db7d94d9f2213c41",
"index": 5088,
"step-1": "<mask token>\n",
"step-2": "from .line_detection_research import score_pixel_v3p2\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-15 18:46
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('aposta', '0003_aposta_nome'),
]
operations = [
migrations.CreateModel(
name='Aposta2',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('aposta_identificacao', models.CharField(max_length=200)),
('valor', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Concurso2',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('concurso_edicao', models.CharField(max_length=20)),
('pub_data', models.DateTimeField(verbose_name='data de publicacao')),
],
),
migrations.AlterField(
model_name='aposta',
name='dataAposta',
field=models.DateField(),
),
migrations.AddField(
model_name='aposta2',
name='Concurso2_identificao',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aposta.Concurso2'),
),
]
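
# Minimal models.py sketch for the two tables created above; model and field names are
# taken directly from the operations, everything else (app layout, __str__ methods,
# related_name choices) is assumed rather than recovered from the original app.
#
#     from django.db import models
#
#     class Concurso2(models.Model):
#         concurso_edicao = models.CharField(max_length=20)
#         pub_data = models.DateTimeField('data de publicacao')
#
#     class Aposta2(models.Model):
#         aposta_identificacao = models.CharField(max_length=200)
#         valor = models.IntegerField(default=0)
#         Concurso2_identificao = models.ForeignKey(Concurso2, on_delete=models.CASCADE)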
|
normal
|
{
"blob_id": "a917dd6171a78142fefa8c8bfad0110729fc1bb0",
"index": 3190,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('aposta', '0003_aposta_nome')]\n operations = [migrations.CreateModel(name='Aposta2', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('aposta_identificacao', models.\n CharField(max_length=200)), ('valor', models.IntegerField(default=0\n ))]), migrations.CreateModel(name='Concurso2', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('concurso_edicao', models.CharField(\n max_length=20)), ('pub_data', models.DateTimeField(verbose_name=\n 'data de publicacao'))]), migrations.AlterField(model_name='aposta',\n name='dataAposta', field=models.DateField()), migrations.AddField(\n model_name='aposta2', name='Concurso2_identificao', field=models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'aposta.Concurso2'))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('aposta', '0003_aposta_nome')]\n operations = [migrations.CreateModel(name='Aposta2', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('aposta_identificacao', models.\n CharField(max_length=200)), ('valor', models.IntegerField(default=0\n ))]), migrations.CreateModel(name='Concurso2', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('concurso_edicao', models.CharField(\n max_length=20)), ('pub_data', models.DateTimeField(verbose_name=\n 'data de publicacao'))]), migrations.AlterField(model_name='aposta',\n name='dataAposta', field=models.DateField()), migrations.AddField(\n model_name='aposta2', name='Concurso2_identificao', field=models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'aposta.Concurso2'))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.6 on 2017-04-15 18:46\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('aposta', '0003_aposta_nome'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Aposta2',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('aposta_identificacao', models.CharField(max_length=200)),\n ('valor', models.IntegerField(default=0)),\n ],\n ),\n migrations.CreateModel(\n name='Concurso2',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('concurso_edicao', models.CharField(max_length=20)),\n ('pub_data', models.DateTimeField(verbose_name='data de publicacao')),\n ],\n ),\n migrations.AlterField(\n model_name='aposta',\n name='dataAposta',\n field=models.DateField(),\n ),\n migrations.AddField(\n model_name='aposta2',\n name='Concurso2_identificao',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aposta.Concurso2'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#C:\utils\Python\Python27\python.exe incompletosClean.py incompletos\inc.dat incompletos\out.dat
import sys
import os
import os.path
bfTmp = ''
lsOutTmp = []
InFileName = []
lsHTMLName = []
fileNameIn= sys.argv[1]
fileNameOu= sys.argv[2]
# Read the list of HTML input files to process (one path per line).
fo = open(fileNameIn)
InFileName += [x.replace('\n', '') for x in fo.readlines()]
fo.close()
# Each input path names an HTML file; extract the text of every <td><p>...</p></td> cell.
for bfMatFile in InFileName:
    if os.path.isfile(bfMatFile):
        lsHTMLName = []
        fo = open(bfMatFile)
        lsHTMLName += [x.replace('\n', '') for x in fo.readlines()]
        fo.close()
        bfRow = ''
        for rowHTML in lsHTMLName:
            # Locate the opening tag of a table cell and keep the text up to its closing tag.
            iPosic = rowHTML.find('<td><p>')
            if iPosic > 0:
                bfRowPart = rowHTML[iPosic + len('<td><p>'):]
                # Turn the cell text into comma-separated tokens and append it to the current record.
                bfRow += ((bfRowPart[:bfRowPart.index('</p></td>')] + ',').replace(' ', ',')).strip()
        # One output record per input file: drop the trailing comma and terminate with ';'.
        if bfRow != '':
            lsOutTmp.append(bfRow[:len(bfRow)-1] + ';')
# Write all records, one per line, to the output file.
bufferTmp = '\n'
bufferTmp = bufferTmp.join(lsOutTmp)
fo = open(fileNameOu, 'w')
fo.write(bufferTmp)
fo.close()
|
normal
|
{
"blob_id": "031727fa42b87260abb671518b2baeff1c9524f9",
"index": 8913,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nInFileName += [x.replace('\\n', '') for x in fo.readlines()]\nfo.close()\nfor bfMatFile in InFileName:\n if os.path.isfile(bfMatFile):\n lsHTMLName = []\n fo = open(bfMatFile)\n lsHTMLName += [x.replace('\\n', '') for x in fo.readlines()]\n fo.close()\n bfRow = ''\n for rowHTML in lsHTMLName:\n iPosic = rowHTML.find('<td><p>')\n if iPosic > 0:\n bfRowPart = rowHTML[iPosic + len('<td><p>'):]\n bfRow += (bfRowPart[:bfRowPart.index('</p></td>')] + ','\n ).replace(' ', ',').strip()\n if bfRow != '':\n lsOutTmp.append(bfRow[:len(bfRow) - 1] + ';')\n<mask token>\nfo.write(bufferTmp)\nfo.close()\n",
"step-3": "<mask token>\nbfTmp = ''\nlsOutTmp = []\nInFileName = []\nlsHTMLName = []\nfileNameIn = sys.argv[1]\nfileNameOu = sys.argv[2]\nfo = open(fileNameIn)\nInFileName += [x.replace('\\n', '') for x in fo.readlines()]\nfo.close()\nfor bfMatFile in InFileName:\n if os.path.isfile(bfMatFile):\n lsHTMLName = []\n fo = open(bfMatFile)\n lsHTMLName += [x.replace('\\n', '') for x in fo.readlines()]\n fo.close()\n bfRow = ''\n for rowHTML in lsHTMLName:\n iPosic = rowHTML.find('<td><p>')\n if iPosic > 0:\n bfRowPart = rowHTML[iPosic + len('<td><p>'):]\n bfRow += (bfRowPart[:bfRowPart.index('</p></td>')] + ','\n ).replace(' ', ',').strip()\n if bfRow != '':\n lsOutTmp.append(bfRow[:len(bfRow) - 1] + ';')\nbufferTmp = '\\n'\nbufferTmp = bufferTmp.join(lsOutTmp)\nfo = open(fileNameOu, 'w')\nfo.write(bufferTmp)\nfo.close()\n",
"step-4": "import sys\nimport os\nimport os.path\nbfTmp = ''\nlsOutTmp = []\nInFileName = []\nlsHTMLName = []\nfileNameIn = sys.argv[1]\nfileNameOu = sys.argv[2]\nfo = open(fileNameIn)\nInFileName += [x.replace('\\n', '') for x in fo.readlines()]\nfo.close()\nfor bfMatFile in InFileName:\n if os.path.isfile(bfMatFile):\n lsHTMLName = []\n fo = open(bfMatFile)\n lsHTMLName += [x.replace('\\n', '') for x in fo.readlines()]\n fo.close()\n bfRow = ''\n for rowHTML in lsHTMLName:\n iPosic = rowHTML.find('<td><p>')\n if iPosic > 0:\n bfRowPart = rowHTML[iPosic + len('<td><p>'):]\n bfRow += (bfRowPart[:bfRowPart.index('</p></td>')] + ','\n ).replace(' ', ',').strip()\n if bfRow != '':\n lsOutTmp.append(bfRow[:len(bfRow) - 1] + ';')\nbufferTmp = '\\n'\nbufferTmp = bufferTmp.join(lsOutTmp)\nfo = open(fileNameOu, 'w')\nfo.write(bufferTmp)\nfo.close()\n",
"step-5": "#C:\\utils\\Python\\Python27\\python.exe incompletosClean.py incompletos\\inc.dat incompletos\\out.dat\r\n\r\nimport sys\r\nimport os\r\nimport os.path\r\n\r\nbfTmp = ''\r\nlsOutTmp = []\r\nInFileName = []\r\nlsHTMLName = []\r\n\r\nfileNameIn= sys.argv[1]\r\nfileNameOu= sys.argv[2]\r\n\r\nfo = open(fileNameIn)\r\nInFileName += [x.replace('\\n', '') for x in fo.readlines()]\r\nfo.close()\r\n\r\nfor bfMatFile in InFileName:\r\n if os.path.isfile(bfMatFile):\r\n lsHTMLName = []\r\n fo = open(bfMatFile)\r\n lsHTMLName += [x.replace('\\n', '') for x in fo.readlines()]\r\n fo.close()\r\n\r\n bfRow = ''\r\n for rowHTML in lsHTMLName:\r\n iPosic = rowHTML.find('<td><p>')\r\n if iPosic > 0:\r\n bfRowPart = rowHTML[iPosic + len('<td><p>'):]\r\n bfRow += ((bfRowPart[:bfRowPart.index('</p></td>')] + ',').replace(' ', ',')).strip()\r\n\r\n if bfRow != '':\r\n lsOutTmp.append(bfRow[:len(bfRow)-1] + ';')\r\n\r\nbufferTmp = '\\n'\r\nbufferTmp = bufferTmp.join(lsOutTmp)\r\nfo= open(fileNameOu, 'w')\r\nfo.write(bufferTmp)\r\nfo.close()\r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in range(1, n + 1):
now = p[i]
m = min(m, now)
if now == m:
cnt += 1
print(cnt)
<|reserved_special_token_1|>
n = int(input())
p = [220000] + list(map(int, input().split()))
cnt = 0
m = 220000
for i in range(1, n + 1):
now = p[i]
m = min(m, now)
if now == m:
cnt += 1
print(cnt)
|
flexible
|
{
"blob_id": "2a500968cf6786440c0d4240430433db90d1fc2f",
"index": 5941,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(1, n + 1):\n now = p[i]\n m = min(m, now)\n if now == m:\n cnt += 1\nprint(cnt)\n",
"step-3": "n = int(input())\np = [220000] + list(map(int, input().split()))\ncnt = 0\nm = 220000\nfor i in range(1, n + 1):\n now = p[i]\n m = min(m, now)\n if now == m:\n cnt += 1\nprint(cnt)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#cerner_2^5_2019
#Mason Seeger submission 1
from random import randint as r
import operator as o
#Only works with valid integers. A function for quick math brain training.
def randomMath():
correct = 0
while(correct<10):
str_ops = ['+', '-', '*', '/', '%']
ops = {'+': o.add, '-': o.sub, '*': o.mul, '/': o.floordiv, '%': o.mod}
x = r(1,10)
y = r(1,10)
op = str_ops[r(0,4)]
inp = input(str(x) + op + str(y) + '=')
if int(inp) == ops[op](x, y):
correct+=1
print("Correct! Only " + str(10-correct) + ' correct answers to go!')
else:
print("Wrong! " + str(10-correct) + ' correct answers to go!')
print("Congrats!! Good brain training.")
randomMath()
|
normal
|
{
"blob_id": "12f035962925c5380c782e8fad23f16fe9fb9435",
"index": 5311,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef randomMath():\n correct = 0\n while correct < 10:\n str_ops = ['+', '-', '*', '/', '%']\n ops = {'+': o.add, '-': o.sub, '*': o.mul, '/': o.floordiv, '%': o.mod}\n x = r(1, 10)\n y = r(1, 10)\n op = str_ops[r(0, 4)]\n inp = input(str(x) + op + str(y) + '=')\n if int(inp) == ops[op](x, y):\n correct += 1\n print('Correct! Only ' + str(10 - correct) +\n ' correct answers to go!')\n else:\n print('Wrong! ' + str(10 - correct) + ' correct answers to go!')\n print('Congrats!! Good brain training.')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef randomMath():\n correct = 0\n while correct < 10:\n str_ops = ['+', '-', '*', '/', '%']\n ops = {'+': o.add, '-': o.sub, '*': o.mul, '/': o.floordiv, '%': o.mod}\n x = r(1, 10)\n y = r(1, 10)\n op = str_ops[r(0, 4)]\n inp = input(str(x) + op + str(y) + '=')\n if int(inp) == ops[op](x, y):\n correct += 1\n print('Correct! Only ' + str(10 - correct) +\n ' correct answers to go!')\n else:\n print('Wrong! ' + str(10 - correct) + ' correct answers to go!')\n print('Congrats!! Good brain training.')\n\n\nrandomMath()\n",
"step-4": "from random import randint as r\nimport operator as o\n\n\ndef randomMath():\n correct = 0\n while correct < 10:\n str_ops = ['+', '-', '*', '/', '%']\n ops = {'+': o.add, '-': o.sub, '*': o.mul, '/': o.floordiv, '%': o.mod}\n x = r(1, 10)\n y = r(1, 10)\n op = str_ops[r(0, 4)]\n inp = input(str(x) + op + str(y) + '=')\n if int(inp) == ops[op](x, y):\n correct += 1\n print('Correct! Only ' + str(10 - correct) +\n ' correct answers to go!')\n else:\n print('Wrong! ' + str(10 - correct) + ' correct answers to go!')\n print('Congrats!! Good brain training.')\n\n\nrandomMath()\n",
"step-5": "#cerner_2^5_2019\n#Mason Seeger submission 1\n\nfrom random import randint as r\nimport operator as o\n\n#Only works with valid integers. A function for quick math brain training.\ndef randomMath():\n correct = 0\n while(correct<10):\n str_ops = ['+', '-', '*', '/', '%']\n ops = {'+': o.add, '-': o.sub, '*': o.mul, '/': o.floordiv, '%': o.mod}\n x = r(1,10)\n y = r(1,10)\n op = str_ops[r(0,4)]\n\n inp = input(str(x) + op + str(y) + '=')\n if int(inp) == ops[op](x, y):\n correct+=1\n print(\"Correct! Only \" + str(10-correct) + ' correct answers to go!')\n else:\n print(\"Wrong! \" + str(10-correct) + ' correct answers to go!')\n\n print(\"Congrats!! Good brain training.\")\n\nrandomMath()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from . import common_wizard
|
flexible
|
{
"blob_id": "1844cfb3e174454e0e95d91e4e55679caddcd56e",
"index": 1963,
"step-1": "<mask token>\n",
"step-2": "from . import common_wizard\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from urllib.request import urlopen
from json import loads
with urlopen('http://api.nbp.pl/api/exchangerates/tables/A/') as site:
data = loads(site.read().decode('utf-8'))
rates = data[0]['rates']
exchange = input('Jaką wartość chcesz wymienić na złotówki? ')
value, code = exchange.split(' ')
value = float(value)
rate = list(filter(lambda x: x['code'] == code, rates))
print(f'Otrzymujesz {value * rate[0]["mid"]} PLN')
|
normal
|
{
"blob_id": "3f3d7cdf7732b2a1568cd97574e1443225667327",
"index": 9622,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith urlopen('http://api.nbp.pl/api/exchangerates/tables/A/') as site:\n data = loads(site.read().decode('utf-8'))\n rates = data[0]['rates']\n exchange = input('Jaką wartość chcesz wymienić na złotówki? ')\n value, code = exchange.split(' ')\n value = float(value)\n rate = list(filter(lambda x: x['code'] == code, rates))\n print(f\"Otrzymujesz {value * rate[0]['mid']} PLN\")\n",
"step-3": "from urllib.request import urlopen\nfrom json import loads\nwith urlopen('http://api.nbp.pl/api/exchangerates/tables/A/') as site:\n data = loads(site.read().decode('utf-8'))\n rates = data[0]['rates']\n exchange = input('Jaką wartość chcesz wymienić na złotówki? ')\n value, code = exchange.split(' ')\n value = float(value)\n rate = list(filter(lambda x: x['code'] == code, rates))\n print(f\"Otrzymujesz {value * rate[0]['mid']} PLN\")\n",
"step-4": "from urllib.request import urlopen\nfrom json import loads\n\n\nwith urlopen('http://api.nbp.pl/api/exchangerates/tables/A/') as site:\n data = loads(site.read().decode('utf-8'))\n rates = data[0]['rates']\n\n exchange = input('Jaką wartość chcesz wymienić na złotówki? ')\n value, code = exchange.split(' ')\n value = float(value)\n\n rate = list(filter(lambda x: x['code'] == code, rates))\n print(f'Otrzymujesz {value * rate[0][\"mid\"]} PLN')\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def get_db():
db = SessionLocal()
try:
yield db
finally:
db.close()
@app.post('/users/', response_model=schemas.UserCreate)
def create_user(user: schemas.UserCreate, db: Session=Depends(get_db)):
db_user = crud.get_user_by_mail(db, user.mail)
if db_user:
raise HTTPException(status_code=400, detail='Email already registered')
return crud.create_user(db=db, user=user)
@app.get('/users/{user_id}', response_model=schemas.User)
def read_user(user_id: int, db: Session=Depends(get_db)):
db_user = crud.get_user(db, user_id)
if db_user is None:
raise HTTPException(status_code=404, detail='User not found')
return db_user
@app.delete('/users/{user_id}', response_model=schemas.User)
def delete_user(user_id: int, db: Session=Depends(get_db)):
db_user = crud.delete_user(user_id)
if db_user is None:
raise HTTPException(status_code=404, detail='User not found')
return db_user
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_db():
db = SessionLocal()
try:
yield db
finally:
db.close()
@app.post('/users/', response_model=schemas.UserCreate)
def create_user(user: schemas.UserCreate, db: Session=Depends(get_db)):
db_user = crud.get_user_by_mail(db, user.mail)
if db_user:
raise HTTPException(status_code=400, detail='Email already registered')
return crud.create_user(db=db, user=user)
@app.get('/users/{user_id}', response_model=schemas.User)
def read_user(user_id: int, db: Session=Depends(get_db)):
db_user = crud.get_user(db, user_id)
if db_user is None:
raise HTTPException(status_code=404, detail='User not found')
return db_user
@app.delete('/users/{user_id}', response_model=schemas.User)
def delete_user(user_id: int, db: Session=Depends(get_db)):
db_user = crud.delete_user(user_id)
if db_user is None:
raise HTTPException(status_code=404, detail='User not found')
return db_user
@app.get('/demand', response_model=schemas.Demand)
def place_demand(demand: schemas.DemandCreate, db: Session=Depends(get_db)):
db_demand = crud.get_active_demand_user(db, demand.user_id)
if db_demand:
raise HTTPException(status_code=400, detail=
'The user already has an open demand')
db_demand = crud.create_demand(db, demand)
return db_demand
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
models.Base.metadata.create_all(bind=engine)
<|reserved_special_token_0|>
def get_db():
db = SessionLocal()
try:
yield db
finally:
db.close()
@app.post('/users/', response_model=schemas.UserCreate)
def create_user(user: schemas.UserCreate, db: Session=Depends(get_db)):
db_user = crud.get_user_by_mail(db, user.mail)
if db_user:
raise HTTPException(status_code=400, detail='Email already registered')
return crud.create_user(db=db, user=user)
@app.get('/users/{user_id}', response_model=schemas.User)
def read_user(user_id: int, db: Session=Depends(get_db)):
db_user = crud.get_user(db, user_id)
if db_user is None:
raise HTTPException(status_code=404, detail='User not found')
return db_user
@app.delete('/users/{user_id}', response_model=schemas.User)
def delete_user(user_id: int, db: Session=Depends(get_db)):
db_user = crud.delete_user(user_id)
if db_user is None:
raise HTTPException(status_code=404, detail='User not found')
return db_user
@app.get('/demand', response_model=schemas.Demand)
def place_demand(demand: schemas.DemandCreate, db: Session=Depends(get_db)):
db_demand = crud.get_active_demand_user(db, demand.user_id)
if db_demand:
raise HTTPException(status_code=400, detail=
'The user already has an open demand')
db_demand = crud.create_demand(db, demand)
return db_demand
if __name__ == '__main__':
uvicorn.run(app, host='0.0.0.0', port=8000)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
models.Base.metadata.create_all(bind=engine)
app = FastAPI()
def get_db():
db = SessionLocal()
try:
yield db
finally:
db.close()
@app.post('/users/', response_model=schemas.UserCreate)
def create_user(user: schemas.UserCreate, db: Session=Depends(get_db)):
db_user = crud.get_user_by_mail(db, user.mail)
if db_user:
raise HTTPException(status_code=400, detail='Email already registered')
return crud.create_user(db=db, user=user)
@app.get('/users/{user_id}', response_model=schemas.User)
def read_user(user_id: int, db: Session=Depends(get_db)):
db_user = crud.get_user(db, user_id)
if db_user is None:
raise HTTPException(status_code=404, detail='User not found')
return db_user
@app.delete('/users/{user_id}', response_model=schemas.User)
def delete_user(user_id: int, db: Session=Depends(get_db)):
db_user = crud.delete_user(user_id)
if db_user is None:
raise HTTPException(status_code=404, detail='User not found')
return db_user
@app.get('/demand', response_model=schemas.Demand)
def place_demand(demand: schemas.DemandCreate, db: Session=Depends(get_db)):
db_demand = crud.get_active_demand_user(db, demand.user_id)
if db_demand:
raise HTTPException(status_code=400, detail=
'The user already has an open demand')
db_demand = crud.create_demand(db, demand)
return db_demand
if __name__ == '__main__':
uvicorn.run(app, host='0.0.0.0', port=8000)
<|reserved_special_token_1|>
from typing import List
import uvicorn
from fastapi import Depends, FastAPI, HTTPException
from sqlalchemy.orm import Session
from . import crud, models, schemas
from .database import SessionLocal, engine
models.Base.metadata.create_all(bind=engine)
app = FastAPI()
def get_db():
db = SessionLocal()
try:
yield db
finally:
db.close()
@app.post("/users/", response_model=schemas.UserCreate)
def create_user(user: schemas.UserCreate, db: Session = Depends(get_db)):
db_user = crud.get_user_by_mail(db, user.mail)
if db_user:
raise HTTPException(status_code=400, detail="Email already registered")
return crud.create_user(db=db, user=user)
@app.get("/users/{user_id}", response_model=schemas.User)
def read_user(user_id: int, db: Session = Depends(get_db)):
db_user = crud.get_user(db, user_id)
if db_user is None:
raise HTTPException(status_code=404, detail = "User not found")
return db_user
@app.delete("/users/{user_id}", response_model=schemas.User)
def delete_user(user_id: int, db: Session = Depends(get_db)):
db_user = crud.delete_user(user_id)
if db_user is None:
raise HTTPException(status_code=404, detail="User not found")
return db_user
# @app.post("/cars/", response_model=schemas.Car)
# def create_user(car: schemas.CarCreate, db: Session = Depends(get_db)):
#
# if db_car:
# raise HTTPException(status_code=400, detail="License already registered")
# return crud.create_car(db=db, car=car)
@app.get("/demand", response_model=schemas.Demand)
def place_demand(demand: schemas.DemandCreate, db: Session = Depends(get_db)):
db_demand = crud.get_active_demand_user(db, demand.user_id)
if db_demand:
raise HTTPException(status_code=400, detail="The user already has an open demand")
db_demand = crud.create_demand(db, demand)
#ToDo Trigger schedular
return db_demand
if __name__ == "__main__":
uvicorn.run(app, host="0.0.0.0", port=8000)
|
flexible
|
{
"blob_id": "5961c593b46a8d3a0f7c62d862cce9a2814e42f4",
"index": 9019,
"step-1": "<mask token>\n\n\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\[email protected]('/users/', response_model=schemas.UserCreate)\ndef create_user(user: schemas.UserCreate, db: Session=Depends(get_db)):\n db_user = crud.get_user_by_mail(db, user.mail)\n if db_user:\n raise HTTPException(status_code=400, detail='Email already registered')\n return crud.create_user(db=db, user=user)\n\n\[email protected]('/users/{user_id}', response_model=schemas.User)\ndef read_user(user_id: int, db: Session=Depends(get_db)):\n db_user = crud.get_user(db, user_id)\n if db_user is None:\n raise HTTPException(status_code=404, detail='User not found')\n return db_user\n\n\[email protected]('/users/{user_id}', response_model=schemas.User)\ndef delete_user(user_id: int, db: Session=Depends(get_db)):\n db_user = crud.delete_user(user_id)\n if db_user is None:\n raise HTTPException(status_code=404, detail='User not found')\n return db_user\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\[email protected]('/users/', response_model=schemas.UserCreate)\ndef create_user(user: schemas.UserCreate, db: Session=Depends(get_db)):\n db_user = crud.get_user_by_mail(db, user.mail)\n if db_user:\n raise HTTPException(status_code=400, detail='Email already registered')\n return crud.create_user(db=db, user=user)\n\n\[email protected]('/users/{user_id}', response_model=schemas.User)\ndef read_user(user_id: int, db: Session=Depends(get_db)):\n db_user = crud.get_user(db, user_id)\n if db_user is None:\n raise HTTPException(status_code=404, detail='User not found')\n return db_user\n\n\[email protected]('/users/{user_id}', response_model=schemas.User)\ndef delete_user(user_id: int, db: Session=Depends(get_db)):\n db_user = crud.delete_user(user_id)\n if db_user is None:\n raise HTTPException(status_code=404, detail='User not found')\n return db_user\n\n\[email protected]('/demand', response_model=schemas.Demand)\ndef place_demand(demand: schemas.DemandCreate, db: Session=Depends(get_db)):\n db_demand = crud.get_active_demand_user(db, demand.user_id)\n if db_demand:\n raise HTTPException(status_code=400, detail=\n 'The user already has an open demand')\n db_demand = crud.create_demand(db, demand)\n return db_demand\n\n\n<mask token>\n",
"step-3": "<mask token>\nmodels.Base.metadata.create_all(bind=engine)\n<mask token>\n\n\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\[email protected]('/users/', response_model=schemas.UserCreate)\ndef create_user(user: schemas.UserCreate, db: Session=Depends(get_db)):\n db_user = crud.get_user_by_mail(db, user.mail)\n if db_user:\n raise HTTPException(status_code=400, detail='Email already registered')\n return crud.create_user(db=db, user=user)\n\n\[email protected]('/users/{user_id}', response_model=schemas.User)\ndef read_user(user_id: int, db: Session=Depends(get_db)):\n db_user = crud.get_user(db, user_id)\n if db_user is None:\n raise HTTPException(status_code=404, detail='User not found')\n return db_user\n\n\[email protected]('/users/{user_id}', response_model=schemas.User)\ndef delete_user(user_id: int, db: Session=Depends(get_db)):\n db_user = crud.delete_user(user_id)\n if db_user is None:\n raise HTTPException(status_code=404, detail='User not found')\n return db_user\n\n\[email protected]('/demand', response_model=schemas.Demand)\ndef place_demand(demand: schemas.DemandCreate, db: Session=Depends(get_db)):\n db_demand = crud.get_active_demand_user(db, demand.user_id)\n if db_demand:\n raise HTTPException(status_code=400, detail=\n 'The user already has an open demand')\n db_demand = crud.create_demand(db, demand)\n return db_demand\n\n\nif __name__ == '__main__':\n uvicorn.run(app, host='0.0.0.0', port=8000)\n",
"step-4": "<mask token>\nmodels.Base.metadata.create_all(bind=engine)\napp = FastAPI()\n\n\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\[email protected]('/users/', response_model=schemas.UserCreate)\ndef create_user(user: schemas.UserCreate, db: Session=Depends(get_db)):\n db_user = crud.get_user_by_mail(db, user.mail)\n if db_user:\n raise HTTPException(status_code=400, detail='Email already registered')\n return crud.create_user(db=db, user=user)\n\n\[email protected]('/users/{user_id}', response_model=schemas.User)\ndef read_user(user_id: int, db: Session=Depends(get_db)):\n db_user = crud.get_user(db, user_id)\n if db_user is None:\n raise HTTPException(status_code=404, detail='User not found')\n return db_user\n\n\[email protected]('/users/{user_id}', response_model=schemas.User)\ndef delete_user(user_id: int, db: Session=Depends(get_db)):\n db_user = crud.delete_user(user_id)\n if db_user is None:\n raise HTTPException(status_code=404, detail='User not found')\n return db_user\n\n\[email protected]('/demand', response_model=schemas.Demand)\ndef place_demand(demand: schemas.DemandCreate, db: Session=Depends(get_db)):\n db_demand = crud.get_active_demand_user(db, demand.user_id)\n if db_demand:\n raise HTTPException(status_code=400, detail=\n 'The user already has an open demand')\n db_demand = crud.create_demand(db, demand)\n return db_demand\n\n\nif __name__ == '__main__':\n uvicorn.run(app, host='0.0.0.0', port=8000)\n",
"step-5": "from typing import List\n\nimport uvicorn\nfrom fastapi import Depends, FastAPI, HTTPException\nfrom sqlalchemy.orm import Session\n\nfrom . import crud, models, schemas\nfrom .database import SessionLocal, engine\n\nmodels.Base.metadata.create_all(bind=engine)\n\napp = FastAPI()\n\ndef get_db():\n db = SessionLocal()\n\n try:\n yield db\n finally:\n db.close()\n\[email protected](\"/users/\", response_model=schemas.UserCreate)\ndef create_user(user: schemas.UserCreate, db: Session = Depends(get_db)):\n db_user = crud.get_user_by_mail(db, user.mail)\n if db_user:\n raise HTTPException(status_code=400, detail=\"Email already registered\")\n return crud.create_user(db=db, user=user)\n\[email protected](\"/users/{user_id}\", response_model=schemas.User)\ndef read_user(user_id: int, db: Session = Depends(get_db)):\n db_user = crud.get_user(db, user_id)\n\n if db_user is None:\n raise HTTPException(status_code=404, detail = \"User not found\")\n return db_user\n\[email protected](\"/users/{user_id}\", response_model=schemas.User)\ndef delete_user(user_id: int, db: Session = Depends(get_db)):\n db_user = crud.delete_user(user_id)\n\n if db_user is None:\n raise HTTPException(status_code=404, detail=\"User not found\")\n\n return db_user\n\n\n\n# @app.post(\"/cars/\", response_model=schemas.Car)\n# def create_user(car: schemas.CarCreate, db: Session = Depends(get_db)):\n#\n# if db_car:\n# raise HTTPException(status_code=400, detail=\"License already registered\")\n# return crud.create_car(db=db, car=car)\n\n\[email protected](\"/demand\", response_model=schemas.Demand)\ndef place_demand(demand: schemas.DemandCreate, db: Session = Depends(get_db)):\n db_demand = crud.get_active_demand_user(db, demand.user_id)\n if db_demand:\n raise HTTPException(status_code=400, detail=\"The user already has an open demand\")\n\n db_demand = crud.create_demand(db, demand)\n\n #ToDo Trigger schedular\n\n return db_demand\n\nif __name__ == \"__main__\":\n uvicorn.run(app, host=\"0.0.0.0\", port=8000)",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
root = tk.Tk()
root.resizable(width=False, height=False)
root.title('Tugas Algoritma')
canvas = tk.Canvas(root, height=500, width=800)
canvas.pack()
bg = tk.PhotoImage(file='bg.png')
bl = tk.Label(root, image=bg)
bl.place(relwidth=1, relheight=1)
def about():
messagebox.showinfo('About', 'Simulasi Algoritma Sorting')
def help_box(event=None):
messagebox.showinfo('Help', 'For help email to [email protected]',
icon='question')
def exit_editor():
if messagebox.askokcancel('Quti', 'Do you really want to quit?'):
root.destroy()
root.protocol('WM_DELETE_WINDOW', exit_editor)
def donothing():
print('Nothing to do :v')
def bbsort(angka):
for i in range(len(angka) - 1, 0, -1):
for j in range(i):
if angka[j] > angka[j + 1]:
tampung = angka[j]
angka[j] = angka[j + 1]
angka[j + 1] = tampung
def sssort(angka):
for i in range(len(angka)):
min_idx = i
for j in range(i, len(angka)):
if angka[j] < angka[min_idx]:
min_idx = j
tampung = angka[i]
angka[i] = angka[min_idx]
angka[min_idx] = tampung
def issort(angka):
for i in range(1, len(angka)):
idx = angka[i]
for j in range(i - 1, 0, -1):
if angka[j] > idx:
angka[j + 1] = angka[j]
else:
angka[j + 1] = idx
break
def bisort(angka):
for i in range(len(angka)):
min_idx = i
for j in range(i, len(angka)):
if angka[j] < angka[min_idx]:
min_idx = j
tampung = angka[i]
angka[i] = angka[min_idx]
angka[min_idx] = tampung
def bbs_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print(
'#################################################### BUBLE SORT '
)
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
bbsort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
bbs_time.config(text='% .12f' % runtime)
bbs_time.place(x=420, y=185)
def iss_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print(
'#################################################### INSERTION SORT '
)
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
issort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
iss_time.config(text='% .12f' % runtime)
iss_time.place(x=545, y=185)
def sss_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print(
'#################################################### SELECTION SORT '
)
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
sssort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
sss_time.config(text='% .12f' % runtime)
sss_time.place(x=670, y=185)
def bsi_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print('#################################################### BSI')
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
bisort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
bsi_time.config(text='% .12f' % runtime)
bsi_time.place(x=570, y=333)
def generate(entry):
listbox_widget.delete(0, tk.END)
l = int(entry)
listrandom = []
for i in range(l):
value = random.randint(1, 1000)
listrandom.append(value)
listbox_widget.insert(tk.END, value)
angka = listrandom
def cls():
hasil_listbox_widget.delete(0, tk.END)
print('\n' * 100)
menubar = Menu(root)
filemenu = Menu(menubar, tearoff=0)
filemenu.add_command(label='Generate Random Number', command=donothing)
filemenu.add_command(label='Close', command=exit_editor)
filemenu.add_separator()
filemenu.add_command(label='Exit', command=root.quit)
menubar.add_cascade(label='File', menu=filemenu)
aboutmenu = Menu(menubar, tearoff=0)
menubar.add_cascade(label='About', menu=aboutmenu)
aboutmenu.add_command(label='About', command=about)
aboutmenu.add_command(label='Help', command=help_box)
root.config(menu=menubar)
frame_data = tk.Frame(root)
frame_data.place(relx=0.128, rely=0.14, relwidth=0.18, relheight=0.65,
anchor='n')
listbox_widget = tk.Listbox(frame_data, selectmode='BROWSE', height=20,
width=20, background='white')
listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)
listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)
listbox_widget_scrl.configure(command=listbox_widget.yview)
listbox_widget.grid(row=1, sticky=W)
listbox_widget_scrl.grid(row=1, column=1, sticky=NS)
frame_hasil = tk.Frame(root)
frame_hasil.place(relx=0.34, rely=0.14, relwidth=0.18, relheight=0.65,
anchor='n')
hasil_listbox_widget = tk.Listbox(frame_hasil, selectmode='BROWSE',
height=20, width=20, background='white')
hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)
hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)
hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)
hasil_listbox_widget.grid(row=1, sticky=W)
hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)
entry = tk.Entry(root, font=40, width=7)
entry.place(x=105, y=450)
bbs_button = tk.Button(root, text='START', font=40, command=bbs_respon
).place(x=434, y=140)
iss_button = tk.Button(root, text='START', font=40, command=iss_respon
).place(x=555, y=140)
sss_button = tk.Button(root, text='START', font=40, command=sss_respon
).place(x=680, y=140)
bsi_button = tk.Button(root, text='START', font=40, command=bsi_respon
).place(x=466, y=330)
gen_button = tk.Button(root, text='GENERATE', font=40, command=lambda :
generate(entry.get()))
gen_button.place(x=180, y=447)
cls_button = tk.Button(root, text='CLEAN', font=40, command=cls).place(x
=295, y=447)
bbs_time = ttk.Label(root, background='#6367c8', foreground='#fff')
bbs_time['text'] = 'Respon Time'
bbs_time.place(x=429, y=185)
iss_time = tk.Label(root, background='#6367c8', foreground='#fff')
iss_time['text'] = 'Respon Time'
iss_time.place(x=555, y=185)
sss_time = tk.Label(root, background='#6367c8', foreground='#fff')
sss_time['text'] = 'Respon Time'
sss_time.place(x=680, y=185)
bsi_time = tk.Label(root, background='#6367c8', font=40, foreground='#fff')
bsi_time['text'] = 'Respon Time'
bsi_time.place(x=570, y=333)
root.mainloop()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
root = tk.Tk()
root.resizable(width=False, height=False)
root.title('Tugas Algoritma')
canvas = tk.Canvas(root, height=500, width=800)
canvas.pack()
bg = tk.PhotoImage(file='bg.png')
bl = tk.Label(root, image=bg)
bl.place(relwidth=1, relheight=1)
def about():
messagebox.showinfo('About', 'Simulasi Algoritma Sorting')
def help_box(event=None):
messagebox.showinfo('Help', 'For help email to [email protected]',
icon='question')
def exit_editor():
if messagebox.askokcancel('Quti', 'Do you really want to quit?'):
root.destroy()
root.protocol('WM_DELETE_WINDOW', exit_editor)
def donothing():
print('Nothing to do :v')
def bbsort(angka):
for i in range(len(angka) - 1, 0, -1):
for j in range(i):
if angka[j] > angka[j + 1]:
tampung = angka[j]
angka[j] = angka[j + 1]
angka[j + 1] = tampung
def sssort(angka):
for i in range(len(angka)):
min_idx = i
for j in range(i, len(angka)):
if angka[j] < angka[min_idx]:
min_idx = j
tampung = angka[i]
angka[i] = angka[min_idx]
angka[min_idx] = tampung
def issort(angka):
for i in range(1, len(angka)):
idx = angka[i]
for j in range(i - 1, 0, -1):
if angka[j] > idx:
angka[j + 1] = angka[j]
else:
angka[j + 1] = idx
break
def bisort(angka):
for i in range(len(angka)):
min_idx = i
for j in range(i, len(angka)):
if angka[j] < angka[min_idx]:
min_idx = j
tampung = angka[i]
angka[i] = angka[min_idx]
angka[min_idx] = tampung
def bbs_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print(
'#################################################### BUBLE SORT '
)
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
bbsort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
bbs_time.config(text='% .12f' % runtime)
bbs_time.place(x=420, y=185)
def iss_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print(
'#################################################### INSERTION SORT '
)
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
issort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
iss_time.config(text='% .12f' % runtime)
iss_time.place(x=545, y=185)
def sss_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print(
'#################################################### SELECTION SORT '
)
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
sssort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
sss_time.config(text='% .12f' % runtime)
sss_time.place(x=670, y=185)
def bsi_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print('#################################################### BSI')
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
bisort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
bsi_time.config(text='% .12f' % runtime)
bsi_time.place(x=570, y=333)
def generate(entry):
listbox_widget.delete(0, tk.END)
l = int(entry)
listrandom = []
for i in range(l):
value = random.randint(1, 1000)
listrandom.append(value)
listbox_widget.insert(tk.END, value)
angka = listrandom
def cls():
hasil_listbox_widget.delete(0, tk.END)
print('\n' * 100)
menubar = Menu(root)
filemenu = Menu(menubar, tearoff=0)
filemenu.add_command(label='Generate Random Number', command=donothing)
filemenu.add_command(label='Close', command=exit_editor)
filemenu.add_separator()
filemenu.add_command(label='Exit', command=root.quit)
menubar.add_cascade(label='File', menu=filemenu)
aboutmenu = Menu(menubar, tearoff=0)
menubar.add_cascade(label='About', menu=aboutmenu)
aboutmenu.add_command(label='About', command=about)
aboutmenu.add_command(label='Help', command=help_box)
root.config(menu=menubar)
frame_data = tk.Frame(root)
frame_data.place(relx=0.128, rely=0.14, relwidth=0.18, relheight=0.65,
anchor='n')
listbox_widget = tk.Listbox(frame_data, selectmode='BROWSE', height=20,
width=20, background='white')
listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)
listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)
listbox_widget_scrl.configure(command=listbox_widget.yview)
listbox_widget.grid(row=1, sticky=W)
listbox_widget_scrl.grid(row=1, column=1, sticky=NS)
frame_hasil = tk.Frame(root)
frame_hasil.place(relx=0.34, rely=0.14, relwidth=0.18, relheight=0.65,
anchor='n')
hasil_listbox_widget = tk.Listbox(frame_hasil, selectmode='BROWSE',
height=20, width=20, background='white')
hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)
hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)
hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)
hasil_listbox_widget.grid(row=1, sticky=W)
hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)
entry = tk.Entry(root, font=40, width=7)
entry.place(x=105, y=450)
bbs_button = tk.Button(root, text='START', font=40, command=bbs_respon
).place(x=434, y=140)
iss_button = tk.Button(root, text='START', font=40, command=iss_respon
).place(x=555, y=140)
sss_button = tk.Button(root, text='START', font=40, command=sss_respon
).place(x=680, y=140)
bsi_button = tk.Button(root, text='START', font=40, command=bsi_respon
).place(x=466, y=330)
gen_button = tk.Button(root, text='GENERATE', font=40, command=lambda :
generate(entry.get()))
gen_button.place(x=180, y=447)
cls_button = tk.Button(root, text='CLEAN', font=40, command=cls).place(x
=295, y=447)
bbs_time = ttk.Label(root, background='#6367c8', foreground='#fff')
bbs_time['text'] = 'Respon Time'
bbs_time.place(x=429, y=185)
iss_time = tk.Label(root, background='#6367c8', foreground='#fff')
iss_time['text'] = 'Respon Time'
iss_time.place(x=555, y=185)
sss_time = tk.Label(root, background='#6367c8', foreground='#fff')
sss_time['text'] = 'Respon Time'
sss_time.place(x=680, y=185)
bsi_time = tk.Label(root, background='#6367c8', font=40, foreground='#fff')
bsi_time['text'] = 'Respon Time'
bsi_time.place(x=570, y=333)
root.mainloop()
main()
<|reserved_special_token_1|>
from tkinter import *
import tkinter as tk
from tkinter import ttk
from tkinter import messagebox
import random
import numpy as np
import timeit
def main():
root = tk.Tk()
root.resizable(width=False, height=False)
root.title('Tugas Algoritma')
canvas = tk.Canvas(root, height=500, width=800)
canvas.pack()
bg = tk.PhotoImage(file='bg.png')
bl = tk.Label(root, image=bg)
bl.place(relwidth=1, relheight=1)
def about():
messagebox.showinfo('About', 'Simulasi Algoritma Sorting')
def help_box(event=None):
messagebox.showinfo('Help', 'For help email to [email protected]',
icon='question')
def exit_editor():
if messagebox.askokcancel('Quti', 'Do you really want to quit?'):
root.destroy()
root.protocol('WM_DELETE_WINDOW', exit_editor)
def donothing():
print('Nothing to do :v')
def bbsort(angka):
for i in range(len(angka) - 1, 0, -1):
for j in range(i):
if angka[j] > angka[j + 1]:
tampung = angka[j]
angka[j] = angka[j + 1]
angka[j + 1] = tampung
def sssort(angka):
for i in range(len(angka)):
min_idx = i
for j in range(i, len(angka)):
if angka[j] < angka[min_idx]:
min_idx = j
tampung = angka[i]
angka[i] = angka[min_idx]
angka[min_idx] = tampung
def issort(angka):
for i in range(1, len(angka)):
idx = angka[i]
for j in range(i - 1, 0, -1):
if angka[j] > idx:
angka[j + 1] = angka[j]
else:
angka[j + 1] = idx
break
def bisort(angka):
for i in range(len(angka)):
min_idx = i
for j in range(i, len(angka)):
if angka[j] < angka[min_idx]:
min_idx = j
tampung = angka[i]
angka[i] = angka[min_idx]
angka[min_idx] = tampung
def bbs_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print(
'#################################################### BUBLE SORT '
)
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
bbsort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
bbs_time.config(text='% .12f' % runtime)
bbs_time.place(x=420, y=185)
def iss_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print(
'#################################################### INSERTION SORT '
)
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
issort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
iss_time.config(text='% .12f' % runtime)
iss_time.place(x=545, y=185)
def sss_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print(
'#################################################### SELECTION SORT '
)
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
sssort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
sss_time.config(text='% .12f' % runtime)
sss_time.place(x=670, y=185)
def bsi_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print('Data Lu mane?')
else:
print('#################################################### BSI')
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print('Data Sample \n', angka)
start = timeit.default_timer()
bisort(angka)
stop = timeit.default_timer()
runtime = stop - start
print('\n Hasil Sorting \n', angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
bsi_time.config(text='% .12f' % runtime)
bsi_time.place(x=570, y=333)
def generate(entry):
listbox_widget.delete(0, tk.END)
l = int(entry)
listrandom = []
for i in range(l):
value = random.randint(1, 1000)
listrandom.append(value)
listbox_widget.insert(tk.END, value)
angka = listrandom
def cls():
hasil_listbox_widget.delete(0, tk.END)
print('\n' * 100)
menubar = Menu(root)
filemenu = Menu(menubar, tearoff=0)
filemenu.add_command(label='Generate Random Number', command=donothing)
filemenu.add_command(label='Close', command=exit_editor)
filemenu.add_separator()
filemenu.add_command(label='Exit', command=root.quit)
menubar.add_cascade(label='File', menu=filemenu)
aboutmenu = Menu(menubar, tearoff=0)
menubar.add_cascade(label='About', menu=aboutmenu)
aboutmenu.add_command(label='About', command=about)
aboutmenu.add_command(label='Help', command=help_box)
root.config(menu=menubar)
frame_data = tk.Frame(root)
frame_data.place(relx=0.128, rely=0.14, relwidth=0.18, relheight=0.65,
anchor='n')
listbox_widget = tk.Listbox(frame_data, selectmode='BROWSE', height=20,
width=20, background='white')
listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)
listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)
listbox_widget_scrl.configure(command=listbox_widget.yview)
listbox_widget.grid(row=1, sticky=W)
listbox_widget_scrl.grid(row=1, column=1, sticky=NS)
frame_hasil = tk.Frame(root)
frame_hasil.place(relx=0.34, rely=0.14, relwidth=0.18, relheight=0.65,
anchor='n')
hasil_listbox_widget = tk.Listbox(frame_hasil, selectmode='BROWSE',
height=20, width=20, background='white')
hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)
hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)
hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)
hasil_listbox_widget.grid(row=1, sticky=W)
hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)
entry = tk.Entry(root, font=40, width=7)
entry.place(x=105, y=450)
bbs_button = tk.Button(root, text='START', font=40, command=bbs_respon
).place(x=434, y=140)
iss_button = tk.Button(root, text='START', font=40, command=iss_respon
).place(x=555, y=140)
sss_button = tk.Button(root, text='START', font=40, command=sss_respon
).place(x=680, y=140)
bsi_button = tk.Button(root, text='START', font=40, command=bsi_respon
).place(x=466, y=330)
gen_button = tk.Button(root, text='GENERATE', font=40, command=lambda :
generate(entry.get()))
gen_button.place(x=180, y=447)
cls_button = tk.Button(root, text='CLEAN', font=40, command=cls).place(x
=295, y=447)
bbs_time = ttk.Label(root, background='#6367c8', foreground='#fff')
bbs_time['text'] = 'Respon Time'
bbs_time.place(x=429, y=185)
iss_time = tk.Label(root, background='#6367c8', foreground='#fff')
iss_time['text'] = 'Respon Time'
iss_time.place(x=555, y=185)
sss_time = tk.Label(root, background='#6367c8', foreground='#fff')
sss_time['text'] = 'Respon Time'
sss_time.place(x=680, y=185)
bsi_time = tk.Label(root, background='#6367c8', font=40, foreground='#fff')
bsi_time['text'] = 'Respon Time'
bsi_time.place(x=570, y=333)
root.mainloop()
main()
<|reserved_special_token_1|>
from tkinter import *
import tkinter as tk
from tkinter import ttk
from tkinter import messagebox
import random
import numpy as np
import timeit
def main():
root = tk.Tk()
# root.geometry('800x500')
root.resizable(width=False, height=False)
root.title('Tugas Algoritma')
canvas = tk.Canvas(root, height=500, width=800)
canvas.pack()
bg = tk.PhotoImage(file='bg.png')
bl = tk.Label(root, image=bg)
bl.place(relwidth=1, relheight=1)
# root.iconbitmap('icons/pypad.ico')
########################################################################
def about():
messagebox.showinfo("About", "Simulasi Algoritma Sorting")
def help_box(event=None):
messagebox.showinfo(
"Help", "For help email to [email protected]", icon='question')
def exit_editor():
if messagebox.askokcancel("Quti", "Do you really want to quit?"):
root.destroy()
root.protocol('WM_DELETE_WINDOW', exit_editor)
def donothing():
print("Nothing to do :v")
########################################################################
# Bubble Sort
def bbsort(angka):
for i in range(len(angka)-1, 0, -1):
# swap = False
for j in range(i):
if angka[j] > angka[j+1]:
tampung = angka[j]
angka[j] = angka[j+1]
angka[j+1] = tampung
# swap = True
# if not swap:
# break
# Selection Sort
def sssort(angka):
for i in range(len(angka)):
min_idx = i
for j in range(i, len(angka)):
if angka[j] < angka[min_idx]:
min_idx = j
tampung = angka[i]
angka[i] = angka[min_idx]
angka[min_idx] = tampung
# Insertion Sort
def issort(angka):
for i in range(1, len(angka)):
idx = angka[i]
for j in range(i-1, 0, -1):
if angka[j] > idx:
angka[j+1] = angka[j]
else:
angka[j+1] = idx
break
# Bubble+Insertion+Selection Sort Optimize
def bisort(angka):
for i in range(len(angka)):
min_idx = i
for j in range(i, len(angka)):
if angka[j] < angka[min_idx]:
min_idx = j
tampung = angka[i]
angka[i] = angka[min_idx]
angka[min_idx] = tampung
########################################################################
def bbs_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print("Data Lu mane?")
else:
print("#################################################### BUBLE SORT ")
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print("Data Sample \n", angka)
start = timeit.default_timer()
bbsort(angka)
stop = timeit.default_timer()
runtime = stop - start
print("\n Hasil Sorting \n", angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
bbs_time.config(text="% .12f" % runtime)
bbs_time.place(x=420, y=185)
def iss_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print("Data Lu mane?")
else:
print("#################################################### INSERTION SORT ")
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print("Data Sample \n", angka)
start = timeit.default_timer()
issort(angka)
stop = timeit.default_timer()
runtime = stop - start
print("\n Hasil Sorting \n", angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
iss_time.config(text="% .12f" % runtime)
iss_time.place(x=545, y=185)
def sss_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print("Data Lu mane?")
else:
print("#################################################### SELECTION SORT ")
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print("Data Sample \n", angka)
start = timeit.default_timer()
sssort(angka)
stop = timeit.default_timer()
runtime = stop - start
print("\n Hasil Sorting \n", angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
sss_time.config(text="% .12f" % runtime)
sss_time.place(x=670, y=185)
def bsi_respon():
if len(listbox_widget.get(0, tk.END)) == 0:
print("Data Lu mane?")
else:
print("#################################################### BSI")
hasil_listbox_widget.delete(0, tk.END)
angka = list(listbox_widget.get(0, tk.END))
print("Data Sample \n", angka)
start = timeit.default_timer()
bisort(angka)
stop = timeit.default_timer()
runtime = stop - start
print("\n Hasil Sorting \n", angka)
print('RunTime : ', runtime)
print('Jumlah data : ', len(angka))
for hasil_entry in angka:
hasil_listbox_widget.insert(tk.END, hasil_entry)
bsi_time.config(text="% .12f" % runtime)
bsi_time.place(x=570, y=333)
def generate(entry):
listbox_widget.delete(0, tk.END)
l = int(entry)
listrandom = []
for i in range(l):
value = random.randint(1, 1000)
listrandom.append(value)
listbox_widget.insert(tk.END, value)
angka = listrandom
# print(listrandom)
# listbox_entries = random.sample(range(100), int(entry))
# for entry in listbox_entries:
# listbox_widget.insert(tk.END, entry)
#angka = listbox_widget.get(0, tk.END)
def cls():
hasil_listbox_widget.delete(0, tk.END)
print("\n" * 100)
# print [ listbox_widget.get(i) for i in listbox_widget.curselection()]
########################################################################
menubar = Menu(root)
filemenu = Menu(menubar, tearoff=0)
filemenu.add_command(label="Generate Random Number", command=donothing)
filemenu.add_command(label="Close", command=exit_editor)
filemenu.add_separator()
filemenu.add_command(label="Exit", command=root.quit)
menubar.add_cascade(label="File", menu=filemenu)
aboutmenu = Menu(menubar, tearoff=0)
menubar.add_cascade(label="About", menu=aboutmenu)
aboutmenu.add_command(label="About", command=about)
aboutmenu.add_command(label="Help", command=help_box)
root.config(menu=menubar)
########################################################################
# DATA SAMPLING ------------------------------
frame_data = tk.Frame(root)
frame_data.place(relx=0.128, rely=0.140, relwidth=0.18,
relheight=0.65, anchor='n')
listbox_widget = tk.Listbox(
frame_data, selectmode="BROWSE", height=20, width=20, background='white')
listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)
listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)
listbox_widget_scrl.configure(command=listbox_widget.yview)
listbox_widget.grid(row=1, sticky=W)
listbox_widget_scrl.grid(row=1, column=1, sticky=NS)
# DATA HASIL ------------------------------
frame_hasil = tk.Frame(root)
frame_hasil.place(relx=0.34, rely=0.140, relwidth=0.18,
relheight=0.65, anchor='n')
hasil_listbox_widget = tk.Listbox(
frame_hasil, selectmode="BROWSE", height=20, width=20, background='white')
hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)
hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)
hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)
# hasil_listbox_entries = random.sample(range(100), 10)
# for hasil_entry in hasil_listbox_entries:
# hasil_listbox_widget.insert(tk.END, hasil_entry)
hasil_listbox_widget.grid(row=1, sticky=W)
hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)
# Entry
entry = tk.Entry(root, font=40, width=7)
entry.place(x=105, y=450)
# BUTTON
bbs_button = tk.Button(root, text="START", font=40,
command=bbs_respon).place(x=434, y=140)
iss_button = tk.Button(root, text="START", font=40,
command=iss_respon).place(x=555, y=140)
sss_button = tk.Button(root, text="START", font=40,
command=sss_respon).place(x=680, y=140)
bsi_button = tk.Button(root, text="START", font=40,
command=bsi_respon).place(x=466, y=330)
# GENERATE DATA SAMPLING
gen_button = tk.Button(root, text="GENERATE", font=40,
command=lambda: generate(entry.get()))
gen_button.place(x=180, y=447)
cls_button = tk.Button(root, text="CLEAN", font=40,
command=cls).place(x=295, y=447)
# RESPON TIME
bbs_time = ttk.Label(root, background="#6367c8",
foreground="#fff")
bbs_time['text'] = "Respon Time"
bbs_time.place(x=429, y=185)
iss_time = tk.Label(root,
background="#6367c8", foreground="#fff")
iss_time['text'] = "Respon Time"
iss_time.place(x=555, y=185)
sss_time = tk.Label(root,
background="#6367c8", foreground="#fff")
sss_time['text'] = "Respon Time"
sss_time.place(x=680, y=185)
bsi_time = tk.Label(root,
background="#6367c8", font=40, foreground="#fff")
bsi_time['text'] = "Respon Time"
bsi_time.place(x=570, y=333)
########################################################################
root.mainloop()
main()
|
flexible
|
{
"blob_id": "8a9feae4ce209def2c98b7bed993f9b5c019a533",
"index": 7480,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n root = tk.Tk()\n root.resizable(width=False, height=False)\n root.title('Tugas Algoritma')\n canvas = tk.Canvas(root, height=500, width=800)\n canvas.pack()\n bg = tk.PhotoImage(file='bg.png')\n bl = tk.Label(root, image=bg)\n bl.place(relwidth=1, relheight=1)\n\n def about():\n messagebox.showinfo('About', 'Simulasi Algoritma Sorting')\n\n def help_box(event=None):\n messagebox.showinfo('Help', 'For help email to [email protected]',\n icon='question')\n\n def exit_editor():\n if messagebox.askokcancel('Quti', 'Do you really want to quit?'):\n root.destroy()\n root.protocol('WM_DELETE_WINDOW', exit_editor)\n\n def donothing():\n print('Nothing to do :v')\n\n def bbsort(angka):\n for i in range(len(angka) - 1, 0, -1):\n for j in range(i):\n if angka[j] > angka[j + 1]:\n tampung = angka[j]\n angka[j] = angka[j + 1]\n angka[j + 1] = tampung\n\n def sssort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def issort(angka):\n for i in range(1, len(angka)):\n idx = angka[i]\n for j in range(i - 1, 0, -1):\n if angka[j] > idx:\n angka[j + 1] = angka[j]\n else:\n angka[j + 1] = idx\n break\n\n def bisort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def bbs_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### BUBLE SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n bbsort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bbs_time.config(text='% .12f' % runtime)\n bbs_time.place(x=420, y=185)\n\n def iss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### INSERTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n issort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n iss_time.config(text='% .12f' % runtime)\n iss_time.place(x=545, y=185)\n\n def sss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### SELECTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n sssort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n sss_time.config(text='% .12f' % runtime)\n sss_time.place(x=670, y=185)\n\n 
def bsi_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print('#################################################### BSI')\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n bisort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bsi_time.config(text='% .12f' % runtime)\n bsi_time.place(x=570, y=333)\n\n def generate(entry):\n listbox_widget.delete(0, tk.END)\n l = int(entry)\n listrandom = []\n for i in range(l):\n value = random.randint(1, 1000)\n listrandom.append(value)\n listbox_widget.insert(tk.END, value)\n angka = listrandom\n\n def cls():\n hasil_listbox_widget.delete(0, tk.END)\n print('\\n' * 100)\n menubar = Menu(root)\n filemenu = Menu(menubar, tearoff=0)\n filemenu.add_command(label='Generate Random Number', command=donothing)\n filemenu.add_command(label='Close', command=exit_editor)\n filemenu.add_separator()\n filemenu.add_command(label='Exit', command=root.quit)\n menubar.add_cascade(label='File', menu=filemenu)\n aboutmenu = Menu(menubar, tearoff=0)\n menubar.add_cascade(label='About', menu=aboutmenu)\n aboutmenu.add_command(label='About', command=about)\n aboutmenu.add_command(label='Help', command=help_box)\n root.config(menu=menubar)\n frame_data = tk.Frame(root)\n frame_data.place(relx=0.128, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n listbox_widget = tk.Listbox(frame_data, selectmode='BROWSE', height=20,\n width=20, background='white')\n listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)\n listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)\n listbox_widget_scrl.configure(command=listbox_widget.yview)\n listbox_widget.grid(row=1, sticky=W)\n listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n frame_hasil = tk.Frame(root)\n frame_hasil.place(relx=0.34, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n hasil_listbox_widget = tk.Listbox(frame_hasil, selectmode='BROWSE',\n height=20, width=20, background='white')\n hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)\n hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)\n hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)\n hasil_listbox_widget.grid(row=1, sticky=W)\n hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n entry = tk.Entry(root, font=40, width=7)\n entry.place(x=105, y=450)\n bbs_button = tk.Button(root, text='START', font=40, command=bbs_respon\n ).place(x=434, y=140)\n iss_button = tk.Button(root, text='START', font=40, command=iss_respon\n ).place(x=555, y=140)\n sss_button = tk.Button(root, text='START', font=40, command=sss_respon\n ).place(x=680, y=140)\n bsi_button = tk.Button(root, text='START', font=40, command=bsi_respon\n ).place(x=466, y=330)\n gen_button = tk.Button(root, text='GENERATE', font=40, command=lambda :\n generate(entry.get()))\n gen_button.place(x=180, y=447)\n cls_button = tk.Button(root, text='CLEAN', font=40, command=cls).place(x\n =295, y=447)\n bbs_time = ttk.Label(root, background='#6367c8', foreground='#fff')\n bbs_time['text'] = 'Respon Time'\n bbs_time.place(x=429, y=185)\n iss_time = tk.Label(root, background='#6367c8', foreground='#fff')\n iss_time['text'] = 'Respon Time'\n iss_time.place(x=555, y=185)\n sss_time = 
tk.Label(root, background='#6367c8', foreground='#fff')\n sss_time['text'] = 'Respon Time'\n sss_time.place(x=680, y=185)\n bsi_time = tk.Label(root, background='#6367c8', font=40, foreground='#fff')\n bsi_time['text'] = 'Respon Time'\n bsi_time.place(x=570, y=333)\n root.mainloop()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n root = tk.Tk()\n root.resizable(width=False, height=False)\n root.title('Tugas Algoritma')\n canvas = tk.Canvas(root, height=500, width=800)\n canvas.pack()\n bg = tk.PhotoImage(file='bg.png')\n bl = tk.Label(root, image=bg)\n bl.place(relwidth=1, relheight=1)\n\n def about():\n messagebox.showinfo('About', 'Simulasi Algoritma Sorting')\n\n def help_box(event=None):\n messagebox.showinfo('Help', 'For help email to [email protected]',\n icon='question')\n\n def exit_editor():\n if messagebox.askokcancel('Quti', 'Do you really want to quit?'):\n root.destroy()\n root.protocol('WM_DELETE_WINDOW', exit_editor)\n\n def donothing():\n print('Nothing to do :v')\n\n def bbsort(angka):\n for i in range(len(angka) - 1, 0, -1):\n for j in range(i):\n if angka[j] > angka[j + 1]:\n tampung = angka[j]\n angka[j] = angka[j + 1]\n angka[j + 1] = tampung\n\n def sssort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def issort(angka):\n for i in range(1, len(angka)):\n idx = angka[i]\n for j in range(i - 1, 0, -1):\n if angka[j] > idx:\n angka[j + 1] = angka[j]\n else:\n angka[j + 1] = idx\n break\n\n def bisort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def bbs_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### BUBLE SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n bbsort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bbs_time.config(text='% .12f' % runtime)\n bbs_time.place(x=420, y=185)\n\n def iss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### INSERTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n issort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n iss_time.config(text='% .12f' % runtime)\n iss_time.place(x=545, y=185)\n\n def sss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### SELECTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n sssort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n sss_time.config(text='% .12f' % runtime)\n sss_time.place(x=670, y=185)\n\n 
def bsi_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print('#################################################### BSI')\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n bisort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bsi_time.config(text='% .12f' % runtime)\n bsi_time.place(x=570, y=333)\n\n def generate(entry):\n listbox_widget.delete(0, tk.END)\n l = int(entry)\n listrandom = []\n for i in range(l):\n value = random.randint(1, 1000)\n listrandom.append(value)\n listbox_widget.insert(tk.END, value)\n angka = listrandom\n\n def cls():\n hasil_listbox_widget.delete(0, tk.END)\n print('\\n' * 100)\n menubar = Menu(root)\n filemenu = Menu(menubar, tearoff=0)\n filemenu.add_command(label='Generate Random Number', command=donothing)\n filemenu.add_command(label='Close', command=exit_editor)\n filemenu.add_separator()\n filemenu.add_command(label='Exit', command=root.quit)\n menubar.add_cascade(label='File', menu=filemenu)\n aboutmenu = Menu(menubar, tearoff=0)\n menubar.add_cascade(label='About', menu=aboutmenu)\n aboutmenu.add_command(label='About', command=about)\n aboutmenu.add_command(label='Help', command=help_box)\n root.config(menu=menubar)\n frame_data = tk.Frame(root)\n frame_data.place(relx=0.128, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n listbox_widget = tk.Listbox(frame_data, selectmode='BROWSE', height=20,\n width=20, background='white')\n listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)\n listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)\n listbox_widget_scrl.configure(command=listbox_widget.yview)\n listbox_widget.grid(row=1, sticky=W)\n listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n frame_hasil = tk.Frame(root)\n frame_hasil.place(relx=0.34, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n hasil_listbox_widget = tk.Listbox(frame_hasil, selectmode='BROWSE',\n height=20, width=20, background='white')\n hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)\n hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)\n hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)\n hasil_listbox_widget.grid(row=1, sticky=W)\n hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n entry = tk.Entry(root, font=40, width=7)\n entry.place(x=105, y=450)\n bbs_button = tk.Button(root, text='START', font=40, command=bbs_respon\n ).place(x=434, y=140)\n iss_button = tk.Button(root, text='START', font=40, command=iss_respon\n ).place(x=555, y=140)\n sss_button = tk.Button(root, text='START', font=40, command=sss_respon\n ).place(x=680, y=140)\n bsi_button = tk.Button(root, text='START', font=40, command=bsi_respon\n ).place(x=466, y=330)\n gen_button = tk.Button(root, text='GENERATE', font=40, command=lambda :\n generate(entry.get()))\n gen_button.place(x=180, y=447)\n cls_button = tk.Button(root, text='CLEAN', font=40, command=cls).place(x\n =295, y=447)\n bbs_time = ttk.Label(root, background='#6367c8', foreground='#fff')\n bbs_time['text'] = 'Respon Time'\n bbs_time.place(x=429, y=185)\n iss_time = tk.Label(root, background='#6367c8', foreground='#fff')\n iss_time['text'] = 'Respon Time'\n iss_time.place(x=555, y=185)\n sss_time = 
tk.Label(root, background='#6367c8', foreground='#fff')\n sss_time['text'] = 'Respon Time'\n sss_time.place(x=680, y=185)\n bsi_time = tk.Label(root, background='#6367c8', font=40, foreground='#fff')\n bsi_time['text'] = 'Respon Time'\n bsi_time.place(x=570, y=333)\n root.mainloop()\n\n\nmain()\n",
"step-4": "from tkinter import *\nimport tkinter as tk\nfrom tkinter import ttk\nfrom tkinter import messagebox\nimport random\nimport numpy as np\nimport timeit\n\n\ndef main():\n root = tk.Tk()\n root.resizable(width=False, height=False)\n root.title('Tugas Algoritma')\n canvas = tk.Canvas(root, height=500, width=800)\n canvas.pack()\n bg = tk.PhotoImage(file='bg.png')\n bl = tk.Label(root, image=bg)\n bl.place(relwidth=1, relheight=1)\n\n def about():\n messagebox.showinfo('About', 'Simulasi Algoritma Sorting')\n\n def help_box(event=None):\n messagebox.showinfo('Help', 'For help email to [email protected]',\n icon='question')\n\n def exit_editor():\n if messagebox.askokcancel('Quti', 'Do you really want to quit?'):\n root.destroy()\n root.protocol('WM_DELETE_WINDOW', exit_editor)\n\n def donothing():\n print('Nothing to do :v')\n\n def bbsort(angka):\n for i in range(len(angka) - 1, 0, -1):\n for j in range(i):\n if angka[j] > angka[j + 1]:\n tampung = angka[j]\n angka[j] = angka[j + 1]\n angka[j + 1] = tampung\n\n def sssort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def issort(angka):\n for i in range(1, len(angka)):\n idx = angka[i]\n for j in range(i - 1, 0, -1):\n if angka[j] > idx:\n angka[j + 1] = angka[j]\n else:\n angka[j + 1] = idx\n break\n\n def bisort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def bbs_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### BUBLE SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n bbsort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bbs_time.config(text='% .12f' % runtime)\n bbs_time.place(x=420, y=185)\n\n def iss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### INSERTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n issort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n iss_time.config(text='% .12f' % runtime)\n iss_time.place(x=545, y=185)\n\n def sss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### SELECTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n sssort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry 
in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n sss_time.config(text='% .12f' % runtime)\n sss_time.place(x=670, y=185)\n\n def bsi_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print('#################################################### BSI')\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n bisort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bsi_time.config(text='% .12f' % runtime)\n bsi_time.place(x=570, y=333)\n\n def generate(entry):\n listbox_widget.delete(0, tk.END)\n l = int(entry)\n listrandom = []\n for i in range(l):\n value = random.randint(1, 1000)\n listrandom.append(value)\n listbox_widget.insert(tk.END, value)\n angka = listrandom\n\n def cls():\n hasil_listbox_widget.delete(0, tk.END)\n print('\\n' * 100)\n menubar = Menu(root)\n filemenu = Menu(menubar, tearoff=0)\n filemenu.add_command(label='Generate Random Number', command=donothing)\n filemenu.add_command(label='Close', command=exit_editor)\n filemenu.add_separator()\n filemenu.add_command(label='Exit', command=root.quit)\n menubar.add_cascade(label='File', menu=filemenu)\n aboutmenu = Menu(menubar, tearoff=0)\n menubar.add_cascade(label='About', menu=aboutmenu)\n aboutmenu.add_command(label='About', command=about)\n aboutmenu.add_command(label='Help', command=help_box)\n root.config(menu=menubar)\n frame_data = tk.Frame(root)\n frame_data.place(relx=0.128, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n listbox_widget = tk.Listbox(frame_data, selectmode='BROWSE', height=20,\n width=20, background='white')\n listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)\n listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)\n listbox_widget_scrl.configure(command=listbox_widget.yview)\n listbox_widget.grid(row=1, sticky=W)\n listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n frame_hasil = tk.Frame(root)\n frame_hasil.place(relx=0.34, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n hasil_listbox_widget = tk.Listbox(frame_hasil, selectmode='BROWSE',\n height=20, width=20, background='white')\n hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)\n hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)\n hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)\n hasil_listbox_widget.grid(row=1, sticky=W)\n hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n entry = tk.Entry(root, font=40, width=7)\n entry.place(x=105, y=450)\n bbs_button = tk.Button(root, text='START', font=40, command=bbs_respon\n ).place(x=434, y=140)\n iss_button = tk.Button(root, text='START', font=40, command=iss_respon\n ).place(x=555, y=140)\n sss_button = tk.Button(root, text='START', font=40, command=sss_respon\n ).place(x=680, y=140)\n bsi_button = tk.Button(root, text='START', font=40, command=bsi_respon\n ).place(x=466, y=330)\n gen_button = tk.Button(root, text='GENERATE', font=40, command=lambda :\n generate(entry.get()))\n gen_button.place(x=180, y=447)\n cls_button = tk.Button(root, text='CLEAN', font=40, command=cls).place(x\n =295, y=447)\n bbs_time = ttk.Label(root, background='#6367c8', foreground='#fff')\n bbs_time['text'] = 'Respon Time'\n bbs_time.place(x=429, y=185)\n 
iss_time = tk.Label(root, background='#6367c8', foreground='#fff')\n iss_time['text'] = 'Respon Time'\n iss_time.place(x=555, y=185)\n sss_time = tk.Label(root, background='#6367c8', foreground='#fff')\n sss_time['text'] = 'Respon Time'\n sss_time.place(x=680, y=185)\n bsi_time = tk.Label(root, background='#6367c8', font=40, foreground='#fff')\n bsi_time['text'] = 'Respon Time'\n bsi_time.place(x=570, y=333)\n root.mainloop()\n\n\nmain()\n",
"step-5": "from tkinter import *\r\nimport tkinter as tk\r\nfrom tkinter import ttk\r\nfrom tkinter import messagebox\r\n\r\nimport random\r\nimport numpy as np\r\n\r\nimport timeit\r\n\r\n\r\ndef main():\r\n\r\n root = tk.Tk()\r\n # root.geometry('800x500')\r\n root.resizable(width=False, height=False)\r\n root.title('Tugas Algoritma')\r\n\r\n canvas = tk.Canvas(root, height=500, width=800)\r\n canvas.pack()\r\n\r\n bg = tk.PhotoImage(file='bg.png')\r\n bl = tk.Label(root, image=bg)\r\n bl.place(relwidth=1, relheight=1)\r\n\r\n # root.iconbitmap('icons/pypad.ico')\r\n\r\n ########################################################################\r\n\r\n def about():\r\n messagebox.showinfo(\"About\", \"Simulasi Algoritma Sorting\")\r\n\r\n def help_box(event=None):\r\n messagebox.showinfo(\r\n \"Help\", \"For help email to [email protected]\", icon='question')\r\n\r\n def exit_editor():\r\n if messagebox.askokcancel(\"Quti\", \"Do you really want to quit?\"):\r\n root.destroy()\r\n root.protocol('WM_DELETE_WINDOW', exit_editor)\r\n\r\n def donothing():\r\n print(\"Nothing to do :v\")\r\n\r\n ########################################################################\r\n # Bubble Sort\r\n def bbsort(angka):\r\n for i in range(len(angka)-1, 0, -1):\r\n # swap = False\r\n for j in range(i):\r\n if angka[j] > angka[j+1]:\r\n tampung = angka[j]\r\n angka[j] = angka[j+1]\r\n angka[j+1] = tampung\r\n # swap = True\r\n # if not swap:\r\n # break\r\n\r\n # Selection Sort\r\n\r\n def sssort(angka):\r\n for i in range(len(angka)):\r\n min_idx = i\r\n for j in range(i, len(angka)):\r\n if angka[j] < angka[min_idx]:\r\n min_idx = j\r\n tampung = angka[i]\r\n angka[i] = angka[min_idx]\r\n angka[min_idx] = tampung\r\n\r\n # Insertion Sort\r\n\r\n def issort(angka):\r\n for i in range(1, len(angka)):\r\n idx = angka[i]\r\n for j in range(i-1, 0, -1):\r\n if angka[j] > idx:\r\n angka[j+1] = angka[j]\r\n else:\r\n angka[j+1] = idx\r\n break\r\n\r\n # Bubble+Insertion+Selection Sort Optimize\r\n\r\n def bisort(angka):\r\n for i in range(len(angka)):\r\n min_idx = i\r\n for j in range(i, len(angka)):\r\n if angka[j] < angka[min_idx]:\r\n min_idx = j\r\n tampung = angka[i]\r\n angka[i] = angka[min_idx]\r\n angka[min_idx] = tampung\r\n\r\n ########################################################################\r\n\r\n def bbs_respon():\r\n if len(listbox_widget.get(0, tk.END)) == 0:\r\n print(\"Data Lu mane?\")\r\n else:\r\n print(\"#################################################### BUBLE SORT \")\r\n hasil_listbox_widget.delete(0, tk.END)\r\n angka = list(listbox_widget.get(0, tk.END))\r\n print(\"Data Sample \\n\", angka)\r\n start = timeit.default_timer()\r\n bbsort(angka)\r\n stop = timeit.default_timer()\r\n runtime = stop - start\r\n print(\"\\n Hasil Sorting \\n\", angka)\r\n print('RunTime : ', runtime)\r\n print('Jumlah data : ', len(angka))\r\n\r\n for hasil_entry in angka:\r\n hasil_listbox_widget.insert(tk.END, hasil_entry)\r\n\r\n bbs_time.config(text=\"% .12f\" % runtime)\r\n bbs_time.place(x=420, y=185)\r\n\r\n def iss_respon():\r\n if len(listbox_widget.get(0, tk.END)) == 0:\r\n print(\"Data Lu mane?\")\r\n else:\r\n print(\"#################################################### INSERTION SORT \")\r\n hasil_listbox_widget.delete(0, tk.END)\r\n angka = list(listbox_widget.get(0, tk.END))\r\n print(\"Data Sample \\n\", angka)\r\n start = timeit.default_timer()\r\n issort(angka)\r\n stop = timeit.default_timer()\r\n runtime = stop - start\r\n print(\"\\n Hasil Sorting \\n\", angka)\r\n print('RunTime 
: ', runtime)\r\n print('Jumlah data : ', len(angka))\r\n\r\n for hasil_entry in angka:\r\n hasil_listbox_widget.insert(tk.END, hasil_entry)\r\n\r\n iss_time.config(text=\"% .12f\" % runtime)\r\n iss_time.place(x=545, y=185)\r\n\r\n def sss_respon():\r\n if len(listbox_widget.get(0, tk.END)) == 0:\r\n print(\"Data Lu mane?\")\r\n else:\r\n print(\"#################################################### SELECTION SORT \")\r\n hasil_listbox_widget.delete(0, tk.END)\r\n angka = list(listbox_widget.get(0, tk.END))\r\n print(\"Data Sample \\n\", angka)\r\n start = timeit.default_timer()\r\n sssort(angka)\r\n stop = timeit.default_timer()\r\n runtime = stop - start\r\n print(\"\\n Hasil Sorting \\n\", angka)\r\n print('RunTime : ', runtime)\r\n print('Jumlah data : ', len(angka))\r\n\r\n for hasil_entry in angka:\r\n hasil_listbox_widget.insert(tk.END, hasil_entry)\r\n\r\n sss_time.config(text=\"% .12f\" % runtime)\r\n sss_time.place(x=670, y=185)\r\n\r\n def bsi_respon():\r\n if len(listbox_widget.get(0, tk.END)) == 0:\r\n print(\"Data Lu mane?\")\r\n else:\r\n print(\"#################################################### BSI\")\r\n hasil_listbox_widget.delete(0, tk.END)\r\n angka = list(listbox_widget.get(0, tk.END))\r\n print(\"Data Sample \\n\", angka)\r\n start = timeit.default_timer()\r\n bisort(angka)\r\n stop = timeit.default_timer()\r\n runtime = stop - start\r\n print(\"\\n Hasil Sorting \\n\", angka)\r\n print('RunTime : ', runtime)\r\n print('Jumlah data : ', len(angka))\r\n\r\n for hasil_entry in angka:\r\n hasil_listbox_widget.insert(tk.END, hasil_entry)\r\n\r\n bsi_time.config(text=\"% .12f\" % runtime)\r\n bsi_time.place(x=570, y=333)\r\n\r\n def generate(entry):\r\n\r\n listbox_widget.delete(0, tk.END)\r\n\r\n l = int(entry)\r\n listrandom = []\r\n for i in range(l):\r\n value = random.randint(1, 1000)\r\n listrandom.append(value)\r\n listbox_widget.insert(tk.END, value)\r\n\r\n angka = listrandom\r\n # print(listrandom)\r\n\r\n # listbox_entries = random.sample(range(100), int(entry))\r\n # for entry in listbox_entries:\r\n # listbox_widget.insert(tk.END, entry)\r\n #angka = listbox_widget.get(0, tk.END)\r\n\r\n def cls():\r\n hasil_listbox_widget.delete(0, tk.END)\r\n print(\"\\n\" * 100)\r\n # print [ listbox_widget.get(i) for i in listbox_widget.curselection()]\r\n\r\n ########################################################################\r\n menubar = Menu(root)\r\n\r\n filemenu = Menu(menubar, tearoff=0)\r\n filemenu.add_command(label=\"Generate Random Number\", command=donothing)\r\n filemenu.add_command(label=\"Close\", command=exit_editor)\r\n filemenu.add_separator()\r\n filemenu.add_command(label=\"Exit\", command=root.quit)\r\n\r\n menubar.add_cascade(label=\"File\", menu=filemenu)\r\n\r\n aboutmenu = Menu(menubar, tearoff=0)\r\n menubar.add_cascade(label=\"About\", menu=aboutmenu)\r\n aboutmenu.add_command(label=\"About\", command=about)\r\n aboutmenu.add_command(label=\"Help\", command=help_box)\r\n\r\n root.config(menu=menubar)\r\n\r\n ########################################################################\r\n\r\n # DATA SAMPLING ------------------------------\r\n frame_data = tk.Frame(root)\r\n frame_data.place(relx=0.128, rely=0.140, relwidth=0.18,\r\n relheight=0.65, anchor='n')\r\n\r\n listbox_widget = tk.Listbox(\r\n frame_data, selectmode=\"BROWSE\", height=20, width=20, background='white')\r\n listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)\r\n listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)\r\n 
listbox_widget_scrl.configure(command=listbox_widget.yview)\r\n\r\n listbox_widget.grid(row=1, sticky=W)\r\n listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\r\n\r\n # DATA HASIL ------------------------------\r\n frame_hasil = tk.Frame(root)\r\n frame_hasil.place(relx=0.34, rely=0.140, relwidth=0.18,\r\n relheight=0.65, anchor='n')\r\n\r\n hasil_listbox_widget = tk.Listbox(\r\n frame_hasil, selectmode=\"BROWSE\", height=20, width=20, background='white')\r\n hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)\r\n hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)\r\n hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)\r\n # hasil_listbox_entries = random.sample(range(100), 10)\r\n # for hasil_entry in hasil_listbox_entries:\r\n # hasil_listbox_widget.insert(tk.END, hasil_entry)\r\n hasil_listbox_widget.grid(row=1, sticky=W)\r\n hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\r\n\r\n # Entry\r\n entry = tk.Entry(root, font=40, width=7)\r\n entry.place(x=105, y=450)\r\n\r\n # BUTTON\r\n bbs_button = tk.Button(root, text=\"START\", font=40,\r\n command=bbs_respon).place(x=434, y=140)\r\n iss_button = tk.Button(root, text=\"START\", font=40,\r\n command=iss_respon).place(x=555, y=140)\r\n sss_button = tk.Button(root, text=\"START\", font=40,\r\n command=sss_respon).place(x=680, y=140)\r\n bsi_button = tk.Button(root, text=\"START\", font=40,\r\n command=bsi_respon).place(x=466, y=330)\r\n # GENERATE DATA SAMPLING\r\n gen_button = tk.Button(root, text=\"GENERATE\", font=40,\r\n command=lambda: generate(entry.get()))\r\n gen_button.place(x=180, y=447)\r\n cls_button = tk.Button(root, text=\"CLEAN\", font=40,\r\n command=cls).place(x=295, y=447)\r\n\r\n # RESPON TIME\r\n bbs_time = ttk.Label(root, background=\"#6367c8\",\r\n foreground=\"#fff\")\r\n bbs_time['text'] = \"Respon Time\"\r\n bbs_time.place(x=429, y=185)\r\n\r\n iss_time = tk.Label(root,\r\n background=\"#6367c8\", foreground=\"#fff\")\r\n iss_time['text'] = \"Respon Time\"\r\n iss_time.place(x=555, y=185)\r\n\r\n sss_time = tk.Label(root,\r\n background=\"#6367c8\", foreground=\"#fff\")\r\n sss_time['text'] = \"Respon Time\"\r\n sss_time.place(x=680, y=185)\r\n\r\n bsi_time = tk.Label(root,\r\n background=\"#6367c8\", font=40, foreground=\"#fff\")\r\n bsi_time['text'] = \"Respon Time\"\r\n bsi_time.place(x=570, y=333)\r\n\r\n ########################################################################\r\n\r\n root.mainloop()\r\n\r\n\r\nmain()\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class UserModel:
<|reserved_special_token_0|>
def __init__(self, name, password, birth, sex, phone, email, id=0):
if id == 0:
self.id = self.id + 1
else:
self.id = id
self.name = name
self.email = email
s = hashlib.sha256()
s.update(password.encode('utf-8'))
self.password = s.hexdigest()
self.birth = birth
self.sex = sex
self.phone = phone
def add_user(self):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
insert_query = (
'INSERT INTO users (name, password, sex, birth, phone, email) VALUES(?, ?, ?, ?, ?, ?)'
)
cursor.execute(insert_query, (self.name, self.password, self.sex,
self.birth, self.phone, self.email))
conn.commit()
conn.close()
@staticmethod
def get_user(self, id):
user = None
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
query_one_query = 'SELECT * FROM users WHERE id=?'
print(query_one_query)
result = cursor.execute(query_one_query, (str(id),)).fetchone()
if result is None:
return None
print(result)
user = UserModel(id=result[0], name=result[1], password=result[2],
sex=result[3], birth=result[4], phone=result[5], email=result[6])
user.id = result[0]
conn.close()
return user
@staticmethod
def delete_user(self, id):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
delete_query = 'DELETE FROM users WHERE id=?'
cursor.execute(delete_query, (id,))
conn.commit()
conn.close()
def update_user(self):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
update_query = (
'UPDATE users SET name=?, password=?, sex=?, birth=?, phone=?, email=? WHERE id=?'
)
cursor.execute(update_query, (self.name, self.password, self.sex,
self.birth, self.phone, self.email, self.id))
conn.commit()
conn.close()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserModel:
<|reserved_special_token_0|>
def __init__(self, name, password, birth, sex, phone, email, id=0):
if id == 0:
self.id = self.id + 1
else:
self.id = id
self.name = name
self.email = email
s = hashlib.sha256()
s.update(password.encode('utf-8'))
self.password = s.hexdigest()
self.birth = birth
self.sex = sex
self.phone = phone
def add_user(self):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
insert_query = (
'INSERT INTO users (name, password, sex, birth, phone, email) VALUES(?, ?, ?, ?, ?, ?)'
)
cursor.execute(insert_query, (self.name, self.password, self.sex,
self.birth, self.phone, self.email))
conn.commit()
conn.close()
@staticmethod
def get_user(self, id):
user = None
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
query_one_query = 'SELECT * FROM users WHERE id=?'
print(query_one_query)
result = cursor.execute(query_one_query, (str(id),)).fetchone()
if result is None:
return None
print(result)
user = UserModel(id=result[0], name=result[1], password=result[2],
sex=result[3], birth=result[4], phone=result[5], email=result[6])
user.id = result[0]
conn.close()
return user
@staticmethod
def delete_user(self, id):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
delete_query = 'DELETE FROM users WHERE id=?'
cursor.execute(delete_query, (id,))
conn.commit()
conn.close()
def update_user(self):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
update_query = (
'UPDATE users SET name=?, password=?, sex=?, birth=?, phone=?, email=? WHERE id=?'
)
cursor.execute(update_query, (self.name, self.password, self.sex,
self.birth, self.phone, self.email, self.id))
conn.commit()
conn.close()
@staticmethod
def get_all_user():
users = []
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
query_one_query = 'SELECT * FROM users'
for item in cursor.execute(query_one_query):
user = UserModel(id=item[0], name=item[1], password=item[2],
sex=item[3], birth=item[4], phone=item[5], email=item[6])
users.append(user)
conn.close()
return users
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserModel:
id = 0
def __init__(self, name, password, birth, sex, phone, email, id=0):
if id == 0:
self.id = self.id + 1
else:
self.id = id
self.name = name
self.email = email
s = hashlib.sha256()
s.update(password.encode('utf-8'))
self.password = s.hexdigest()
self.birth = birth
self.sex = sex
self.phone = phone
def add_user(self):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
insert_query = (
'INSERT INTO users (name, password, sex, birth, phone, email) VALUES(?, ?, ?, ?, ?, ?)'
)
cursor.execute(insert_query, (self.name, self.password, self.sex,
self.birth, self.phone, self.email))
conn.commit()
conn.close()
@staticmethod
def get_user(self, id):
user = None
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
query_one_query = 'SELECT * FROM users WHERE id=?'
print(query_one_query)
result = cursor.execute(query_one_query, (str(id),)).fetchone()
if result is None:
return None
print(result)
user = UserModel(id=result[0], name=result[1], password=result[2],
sex=result[3], birth=result[4], phone=result[5], email=result[6])
user.id = result[0]
conn.close()
return user
@staticmethod
def delete_user(self, id):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
delete_query = 'DELETE FROM users WHERE id=?'
cursor.execute(delete_query, (id,))
conn.commit()
conn.close()
def update_user(self):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
update_query = (
'UPDATE users SET name=?, password=?, sex=?, birth=?, phone=?, email=? WHERE id=?'
)
cursor.execute(update_query, (self.name, self.password, self.sex,
self.birth, self.phone, self.email, self.id))
conn.commit()
conn.close()
@staticmethod
def get_all_user():
users = []
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
query_one_query = 'SELECT * FROM users'
for item in cursor.execute(query_one_query):
user = UserModel(id=item[0], name=item[1], password=item[2],
sex=item[3], birth=item[4], phone=item[5], email=item[6])
users.append(user)
conn.close()
return users
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserModel:
id = 0
def __init__(self, name, password, birth, sex, phone, email, id=0):
if id == 0:
self.id = self.id + 1
else:
self.id = id
self.name = name
self.email = email
s = hashlib.sha256()
s.update(password.encode('utf-8'))
self.password = s.hexdigest()
self.birth = birth
self.sex = sex
self.phone = phone
def add_user(self):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
insert_query = (
'INSERT INTO users (name, password, sex, birth, phone, email) VALUES(?, ?, ?, ?, ?, ?)'
)
cursor.execute(insert_query, (self.name, self.password, self.sex,
self.birth, self.phone, self.email))
conn.commit()
conn.close()
@staticmethod
def get_user(self, id):
user = None
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
query_one_query = 'SELECT * FROM users WHERE id=?'
print(query_one_query)
result = cursor.execute(query_one_query, (str(id),)).fetchone()
if result is None:
return None
print(result)
user = UserModel(id=result[0], name=result[1], password=result[2],
sex=result[3], birth=result[4], phone=result[5], email=result[6])
user.id = result[0]
conn.close()
return user
@staticmethod
def delete_user(self, id):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
delete_query = 'DELETE FROM users WHERE id=?'
cursor.execute(delete_query, (id,))
conn.commit()
conn.close()
def update_user(self):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
update_query = (
'UPDATE users SET name=?, password=?, sex=?, birth=?, phone=?, email=? WHERE id=?'
)
cursor.execute(update_query, (self.name, self.password, self.sex,
self.birth, self.phone, self.email, self.id))
conn.commit()
conn.close()
@staticmethod
def get_all_user():
users = []
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
query_one_query = 'SELECT * FROM users'
for item in cursor.execute(query_one_query):
user = UserModel(id=item[0], name=item[1], password=item[2],
sex=item[3], birth=item[4], phone=item[5], email=item[6])
users.append(user)
conn.close()
return users
if __name__ == '__main__':
print(UserModel.get_all_user())
<|reserved_special_token_1|>
import sqlite3
import hashlib
users = []
class UserModel:
id = 0
def __init__(self, name, password, birth, sex, phone, email, id=0):
if(id == 0):
self.id = self.id + 1
else:
self.id = id
self.name = name
self.email = email
        # hash the password
s = hashlib.sha256()
s.update(password.encode('utf-8'))
self.password = s.hexdigest()
self.birth = birth
self.sex = sex
self.phone = phone
def add_user(self):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
insert_query = 'INSERT INTO users (name, password, sex, birth, phone, email) \
VALUES(?, ?, ?, ?, ?, ?)'
cursor.execute(insert_query, (self.name, self.password, self.sex,
self.birth, self.phone, self.email))
conn.commit()
conn.close()
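    # Note: get_user and delete_user below are decorated with @staticmethod yet still take
    # `self` as their first parameter, so callers must pass a placeholder value for it.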
@staticmethod
def get_user(self, id):
user = None
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
query_one_query = 'SELECT * FROM users WHERE id=?'
print(query_one_query)
result = cursor.execute(query_one_query, (str(id),)).fetchone()
if result is None:
return None
print(result)
user = UserModel(id=result[0], name=result[1], password=result[2], sex = result[3], \
birth=result[4], phone=result[5], email=result[6])
user.id = result[0]
conn.close()
return user
@staticmethod
def delete_user(self, id):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
delete_query = 'DELETE FROM users WHERE id=?'
cursor.execute(delete_query, (id,))
conn.commit()
conn.close()
def update_user(self):
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
update_query = 'UPDATE users SET name=?, password=?, sex=?, birth=?, phone=?, email=? WHERE id=?'
cursor.execute(update_query, (self.name, self.password, self.sex,
self.birth, self.phone, self.email, self.id))
conn.commit()
conn.close()
@staticmethod
def get_all_user():
users = []
conn = sqlite3.connect('main.db')
cursor = conn.cursor()
query_one_query = 'SELECT * FROM users'
for item in cursor.execute(query_one_query):
user = UserModel(id=item[0], name=item[1], password=item[2], sex = item[3], \
birth=item[4], phone=item[5], email=item[6])
users.append(user)
conn.close()
return users
if __name__ == "__main__":
print(UserModel.get_all_user())
|
flexible
|
{
"blob_id": "e675283f14a3d29fba878e7f6d9592130611c2be",
"index": 1469,
"step-1": "<mask token>\n\n\nclass UserModel:\n <mask token>\n\n def __init__(self, name, password, birth, sex, phone, email, id=0):\n if id == 0:\n self.id = self.id + 1\n else:\n self.id = id\n self.name = name\n self.email = email\n s = hashlib.sha256()\n s.update(password.encode('utf-8'))\n self.password = s.hexdigest()\n self.birth = birth\n self.sex = sex\n self.phone = phone\n\n def add_user(self):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n insert_query = (\n 'INSERT INTO users (name, password, sex, birth, phone, email) VALUES(?, ?, ?, ?, ?, ?)'\n )\n cursor.execute(insert_query, (self.name, self.password, self.sex,\n self.birth, self.phone, self.email))\n conn.commit()\n conn.close()\n\n @staticmethod\n def get_user(self, id):\n user = None\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n query_one_query = 'SELECT * FROM users WHERE id=?'\n print(query_one_query)\n result = cursor.execute(query_one_query, (str(id),)).fetchone()\n if result is None:\n return None\n print(result)\n user = UserModel(id=result[0], name=result[1], password=result[2],\n sex=result[3], birth=result[4], phone=result[5], email=result[6])\n user.id = result[0]\n conn.close()\n return user\n\n @staticmethod\n def delete_user(self, id):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n delete_query = 'DELETE FROM users WHERE id=?'\n cursor.execute(delete_query, (id,))\n conn.commit()\n conn.close()\n\n def update_user(self):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n update_query = (\n 'UPDATE users SET name=?, password=?, sex=?, birth=?, phone=?, email=? WHERE id=?'\n )\n cursor.execute(update_query, (self.name, self.password, self.sex,\n self.birth, self.phone, self.email, self.id))\n conn.commit()\n conn.close()\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass UserModel:\n <mask token>\n\n def __init__(self, name, password, birth, sex, phone, email, id=0):\n if id == 0:\n self.id = self.id + 1\n else:\n self.id = id\n self.name = name\n self.email = email\n s = hashlib.sha256()\n s.update(password.encode('utf-8'))\n self.password = s.hexdigest()\n self.birth = birth\n self.sex = sex\n self.phone = phone\n\n def add_user(self):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n insert_query = (\n 'INSERT INTO users (name, password, sex, birth, phone, email) VALUES(?, ?, ?, ?, ?, ?)'\n )\n cursor.execute(insert_query, (self.name, self.password, self.sex,\n self.birth, self.phone, self.email))\n conn.commit()\n conn.close()\n\n @staticmethod\n def get_user(self, id):\n user = None\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n query_one_query = 'SELECT * FROM users WHERE id=?'\n print(query_one_query)\n result = cursor.execute(query_one_query, (str(id),)).fetchone()\n if result is None:\n return None\n print(result)\n user = UserModel(id=result[0], name=result[1], password=result[2],\n sex=result[3], birth=result[4], phone=result[5], email=result[6])\n user.id = result[0]\n conn.close()\n return user\n\n @staticmethod\n def delete_user(self, id):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n delete_query = 'DELETE FROM users WHERE id=?'\n cursor.execute(delete_query, (id,))\n conn.commit()\n conn.close()\n\n def update_user(self):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n update_query = (\n 'UPDATE users SET name=?, password=?, sex=?, birth=?, phone=?, email=? WHERE id=?'\n )\n cursor.execute(update_query, (self.name, self.password, self.sex,\n self.birth, self.phone, self.email, self.id))\n conn.commit()\n conn.close()\n\n @staticmethod\n def get_all_user():\n users = []\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n query_one_query = 'SELECT * FROM users'\n for item in cursor.execute(query_one_query):\n user = UserModel(id=item[0], name=item[1], password=item[2],\n sex=item[3], birth=item[4], phone=item[5], email=item[6])\n users.append(user)\n conn.close()\n return users\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass UserModel:\n id = 0\n\n def __init__(self, name, password, birth, sex, phone, email, id=0):\n if id == 0:\n self.id = self.id + 1\n else:\n self.id = id\n self.name = name\n self.email = email\n s = hashlib.sha256()\n s.update(password.encode('utf-8'))\n self.password = s.hexdigest()\n self.birth = birth\n self.sex = sex\n self.phone = phone\n\n def add_user(self):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n insert_query = (\n 'INSERT INTO users (name, password, sex, birth, phone, email) VALUES(?, ?, ?, ?, ?, ?)'\n )\n cursor.execute(insert_query, (self.name, self.password, self.sex,\n self.birth, self.phone, self.email))\n conn.commit()\n conn.close()\n\n @staticmethod\n def get_user(self, id):\n user = None\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n query_one_query = 'SELECT * FROM users WHERE id=?'\n print(query_one_query)\n result = cursor.execute(query_one_query, (str(id),)).fetchone()\n if result is None:\n return None\n print(result)\n user = UserModel(id=result[0], name=result[1], password=result[2],\n sex=result[3], birth=result[4], phone=result[5], email=result[6])\n user.id = result[0]\n conn.close()\n return user\n\n @staticmethod\n def delete_user(self, id):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n delete_query = 'DELETE FROM users WHERE id=?'\n cursor.execute(delete_query, (id,))\n conn.commit()\n conn.close()\n\n def update_user(self):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n update_query = (\n 'UPDATE users SET name=?, password=?, sex=?, birth=?, phone=?, email=? WHERE id=?'\n )\n cursor.execute(update_query, (self.name, self.password, self.sex,\n self.birth, self.phone, self.email, self.id))\n conn.commit()\n conn.close()\n\n @staticmethod\n def get_all_user():\n users = []\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n query_one_query = 'SELECT * FROM users'\n for item in cursor.execute(query_one_query):\n user = UserModel(id=item[0], name=item[1], password=item[2],\n sex=item[3], birth=item[4], phone=item[5], email=item[6])\n users.append(user)\n conn.close()\n return users\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass UserModel:\n id = 0\n\n def __init__(self, name, password, birth, sex, phone, email, id=0):\n if id == 0:\n self.id = self.id + 1\n else:\n self.id = id\n self.name = name\n self.email = email\n s = hashlib.sha256()\n s.update(password.encode('utf-8'))\n self.password = s.hexdigest()\n self.birth = birth\n self.sex = sex\n self.phone = phone\n\n def add_user(self):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n insert_query = (\n 'INSERT INTO users (name, password, sex, birth, phone, email) VALUES(?, ?, ?, ?, ?, ?)'\n )\n cursor.execute(insert_query, (self.name, self.password, self.sex,\n self.birth, self.phone, self.email))\n conn.commit()\n conn.close()\n\n @staticmethod\n def get_user(self, id):\n user = None\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n query_one_query = 'SELECT * FROM users WHERE id=?'\n print(query_one_query)\n result = cursor.execute(query_one_query, (str(id),)).fetchone()\n if result is None:\n return None\n print(result)\n user = UserModel(id=result[0], name=result[1], password=result[2],\n sex=result[3], birth=result[4], phone=result[5], email=result[6])\n user.id = result[0]\n conn.close()\n return user\n\n @staticmethod\n def delete_user(self, id):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n delete_query = 'DELETE FROM users WHERE id=?'\n cursor.execute(delete_query, (id,))\n conn.commit()\n conn.close()\n\n def update_user(self):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n update_query = (\n 'UPDATE users SET name=?, password=?, sex=?, birth=?, phone=?, email=? WHERE id=?'\n )\n cursor.execute(update_query, (self.name, self.password, self.sex,\n self.birth, self.phone, self.email, self.id))\n conn.commit()\n conn.close()\n\n @staticmethod\n def get_all_user():\n users = []\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n query_one_query = 'SELECT * FROM users'\n for item in cursor.execute(query_one_query):\n user = UserModel(id=item[0], name=item[1], password=item[2],\n sex=item[3], birth=item[4], phone=item[5], email=item[6])\n users.append(user)\n conn.close()\n return users\n\n\nif __name__ == '__main__':\n print(UserModel.get_all_user())\n",
"step-5": "import sqlite3\nimport hashlib\n\nusers = []\n\nclass UserModel:\n id = 0\n\n def __init__(self, name, password, birth, sex, phone, email, id=0):\n if(id == 0):\n self.id = self.id + 1\n else:\n self.id = id\n self.name = name\n self.email = email\n\n #處理密碼\n s = hashlib.sha256()\n s.update(password.encode('utf-8'))\n self.password = s.hexdigest()\n\n self.birth = birth\n self.sex = sex\n self.phone = phone\n\n def add_user(self):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n insert_query = 'INSERT INTO users (name, password, sex, birth, phone, email) \\\n VALUES(?, ?, ?, ?, ?, ?)'\n cursor.execute(insert_query, (self.name, self.password, self.sex,\n self.birth, self.phone, self.email))\n conn.commit()\n conn.close()\n\n @staticmethod\n def get_user(self, id):\n user = None\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n query_one_query = 'SELECT * FROM users WHERE id=?'\n print(query_one_query)\n result = cursor.execute(query_one_query, (str(id),)).fetchone()\n if result is None:\n return None\n print(result)\n user = UserModel(id=result[0], name=result[1], password=result[2], sex = result[3], \\\n birth=result[4], phone=result[5], email=result[6])\n user.id = result[0]\n conn.close()\n return user\n\n @staticmethod\n def delete_user(self, id):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n delete_query = 'DELETE FROM users WHERE id=?'\n cursor.execute(delete_query, (id,))\n conn.commit()\n conn.close()\n\n def update_user(self):\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n update_query = 'UPDATE users SET name=?, password=?, sex=?, birth=?, phone=?, email=? WHERE id=?'\n cursor.execute(update_query, (self.name, self.password, self.sex,\n self.birth, self.phone, self.email, self.id))\n conn.commit()\n conn.close()\n\n @staticmethod\n def get_all_user():\n users = []\n conn = sqlite3.connect('main.db')\n cursor = conn.cursor()\n query_one_query = 'SELECT * FROM users'\n for item in cursor.execute(query_one_query):\n user = UserModel(id=item[0], name=item[1], password=item[2], sex = item[3], \\\n birth=item[4], phone=item[5], email=item[6])\n users.append(user)\n conn.close()\n return users\n\nif __name__ == \"__main__\":\n print(UserModel.get_all_user())",
"step-ids": [
6,
7,
8,
9,
12
]
}
|
[
6,
7,
8,
9,
12
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-07-26 19:11
from __future__ import unicode_literals
from django.db import migrations, models
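# Alters the cover_url fields of the articles, series and special-column models, giving
# them default placeholder image URLs (the verbose_name '封面图' means "cover image").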
class Migration(migrations.Migration):
dependencies = [
('articles', '0014_auto_20180726_0926'),
]
operations = [
migrations.AlterField(
model_name='articles',
name='cover_url',
field=models.URLField(default='http://pcgsvdl00.bkt.clouddn.com/default/articles/article_01.jpg', max_length=500, verbose_name='封面图'),
),
migrations.AlterField(
model_name='series',
name='cover_url',
field=models.URLField(default='http://pcgsvdl00.bkt.clouddn.com/default/series/series_01.jpg', max_length=500, verbose_name='封面图'),
),
migrations.AlterField(
model_name='specialcolumn',
name='cover_url',
field=models.URLField(default='http://pcgsvdl00.bkt.clouddn.com/default/specialColumn/special_01.jpg', max_length=500, verbose_name='封面图'),
),
]
|
normal
|
{
"blob_id": "671a7ee3fabee6ed8dfafe1bddefb1f94322b0e5",
"index": 2477,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('articles', '0014_auto_20180726_0926')]\n operations = [migrations.AlterField(model_name='articles', name=\n 'cover_url', field=models.URLField(default=\n 'http://pcgsvdl00.bkt.clouddn.com/default/articles/article_01.jpg',\n max_length=500, verbose_name='封面图')), migrations.AlterField(\n model_name='series', name='cover_url', field=models.URLField(\n default=\n 'http://pcgsvdl00.bkt.clouddn.com/default/series/series_01.jpg',\n max_length=500, verbose_name='封面图')), migrations.AlterField(\n model_name='specialcolumn', name='cover_url', field=models.URLField\n (default=\n 'http://pcgsvdl00.bkt.clouddn.com/default/specialColumn/special_01.jpg'\n , max_length=500, verbose_name='封面图'))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('articles', '0014_auto_20180726_0926')]\n operations = [migrations.AlterField(model_name='articles', name=\n 'cover_url', field=models.URLField(default=\n 'http://pcgsvdl00.bkt.clouddn.com/default/articles/article_01.jpg',\n max_length=500, verbose_name='封面图')), migrations.AlterField(\n model_name='series', name='cover_url', field=models.URLField(\n default=\n 'http://pcgsvdl00.bkt.clouddn.com/default/series/series_01.jpg',\n max_length=500, verbose_name='封面图')), migrations.AlterField(\n model_name='specialcolumn', name='cover_url', field=models.URLField\n (default=\n 'http://pcgsvdl00.bkt.clouddn.com/default/specialColumn/special_01.jpg'\n , max_length=500, verbose_name='封面图'))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.12 on 2018-07-26 19:11\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('articles', '0014_auto_20180726_0926'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='articles',\n name='cover_url',\n field=models.URLField(default='http://pcgsvdl00.bkt.clouddn.com/default/articles/article_01.jpg', max_length=500, verbose_name='封面图'),\n ),\n migrations.AlterField(\n model_name='series',\n name='cover_url',\n field=models.URLField(default='http://pcgsvdl00.bkt.clouddn.com/default/series/series_01.jpg', max_length=500, verbose_name='封面图'),\n ),\n migrations.AlterField(\n model_name='specialcolumn',\n name='cover_url',\n field=models.URLField(default='http://pcgsvdl00.bkt.clouddn.com/default/specialColumn/special_01.jpg', max_length=500, verbose_name='封面图'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from game import BaseGame
class First(BaseGame):
key = 'F'
code = 'FIRST'
short_description = 'Vinci se esce 1 o 2. x2.8'
long_description = (
'Si lancia un unico dado, se esce 1 o 2 vinci 2.8 volte quello che hai'
' puntato.')
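    # English gloss of the Italian texts above: "Win if a 1 or 2 comes up. x2.8" /
    # "A single die is rolled; if it shows 1 or 2 you win 2.8 times your stake."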
min_bet = 20
multiplier = 2.8
def has_won(self, draws):
return draws[0] in (1, 2)
|
normal
|
{
"blob_id": "81fa3129d971fe8296a89a7b772d61ff50a8b9f7",
"index": 9284,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass First(BaseGame):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def has_won(self, draws):\n return draws[0] in (1, 2)\n",
"step-3": "<mask token>\n\n\nclass First(BaseGame):\n key = 'F'\n code = 'FIRST'\n short_description = 'Vinci se esce 1 o 2. x2.8'\n long_description = (\n 'Si lancia un unico dado, se esce 1 o 2 vinci 2.8 volte quello che hai puntato.'\n )\n min_bet = 20\n multiplier = 2.8\n\n def has_won(self, draws):\n return draws[0] in (1, 2)\n",
"step-4": "from game import BaseGame\n\n\nclass First(BaseGame):\n key = 'F'\n code = 'FIRST'\n short_description = 'Vinci se esce 1 o 2. x2.8'\n long_description = (\n 'Si lancia un unico dado, se esce 1 o 2 vinci 2.8 volte quello che hai puntato.'\n )\n min_bet = 20\n multiplier = 2.8\n\n def has_won(self, draws):\n return draws[0] in (1, 2)\n",
"step-5": "from game import BaseGame\n\n\nclass First(BaseGame):\n key = 'F'\n code = 'FIRST'\n short_description = 'Vinci se esce 1 o 2. x2.8'\n long_description = (\n 'Si lancia un unico dado, se esce 1 o 2 vinci 2.8 volte quello che hai'\n ' puntato.')\n min_bet = 20\n multiplier = 2.8\n\n def has_won(self, draws):\n return draws[0] in (1, 2)\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
from flask import Flask, render_template
app = Flask(__name__)
@app.route("/")
def index():
headline = "Hello world from a variable!"
    # headline on the left is the variable name in the template (the view)
    # headline on the right is the variable name on the server side
return render_template("index.html", headline=headline)
# Now we reuse the same index.html but with different content!
@app.route("/bye/")
def bye():
headline = "Goodbye!"
return render_template("index.html", headline=headline)
|
normal
|
{
"blob_id": "83bbb6433d1577be869bf840bdd42aa86e415da6",
"index": 9328,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/')\ndef index():\n headline = 'Hello world from a variable!'\n return render_template('index.html', headline=headline)\n\n\[email protected]('/bye/')\ndef bye():\n headline = 'Goodbye!'\n return render_template('index.html', headline=headline)\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\[email protected]('/')\ndef index():\n headline = 'Hello world from a variable!'\n return render_template('index.html', headline=headline)\n\n\[email protected]('/bye/')\ndef bye():\n headline = 'Goodbye!'\n return render_template('index.html', headline=headline)\n",
"step-4": "from flask import Flask, render_template\napp = Flask(__name__)\n\n\[email protected]('/')\ndef index():\n headline = 'Hello world from a variable!'\n return render_template('index.html', headline=headline)\n\n\[email protected]('/bye/')\ndef bye():\n headline = 'Goodbye!'\n return render_template('index.html', headline=headline)\n",
"step-5": "from flask import Flask, render_template\n\napp = Flask(__name__)\n\[email protected](\"/\")\ndef index():\n headline = \"Hello world from a variable!\"\n # headline de la izq es el nombre de la variable en la vista\n # headline de la der es el nombre de la variable en el server\n return render_template(\"index.html\", headline=headline)\n\n# Ahora usamos el mismo idex.html pero con un contenido distinto!\[email protected](\"/bye/\")\ndef bye():\n headline = \"Goodbye!\"\n return render_template(\"index.html\", headline=headline)",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
# %%
import numpy as np
from numpy import sin, cos, pi
import gym
import seagul.envs
from seagul.integration import rk4,euler
from control import lqr, ctrb
from torch.multiprocessing import Pool
import matplotlib.pyplot as plt
import matplotlib
#matplotlib.use('Qt5Agg')
import time
global_start = time.time()
# %%
m1 = 1
m2 = 1
l1 = 1
l2 = 2
lc1 = .5
lc2 = 1
I1 = .083
I2 = .33
g = 9.8
#
# m1 = 1
# m2 = 1
# l1 = 1
# l2 = 1
# lc1 = .5
# lc2 = .5
# I1 = .2
# I2 = 1.0
# g = 9.8
dt = .01
max_torque = 25
integrator = euler
Q = np.identity(4)
Q[0, 0] = 1
Q[1, 1] = 1
Q[2, 2] = 1
Q[3, 3] = 1
#
# Q = np.array([[1000, -500, 0,0],[-500, 1000, 0, 0],[0, 0, 1000, -500],[0,0,-500,1000]])
R = np.identity(2) * .01
eval_max_t = 10
th1 = pi / 2
th2 = 0
th1d = 0
th2d = 0
TAU = np.array([[0], [1]])
m11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)) + I1 + I2
m22 = m2 * lc2 ** 2 + I2
m12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2
M = np.array([[m11, m12], [m12, m22]])
h1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2) * th2d * th1d
h2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2
H = np.array([[h1], [h2]])
phi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)
phi2 = m2 * lc2 * g * cos(th1 + th2)
PHI = np.array([[phi1], [phi2]])
Bl = np.linalg.inv(M) @ TAU
Blin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])
DPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-m2 * lc2 * g, -m2 * lc2 * g]])
Al = -np.linalg.inv(M) @ DPHI
Alin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [Al[1, 0], Al[1, 1], 0, 0]])
Ctr = ctrb(Alin, Blin)
assert np.linalg.matrix_rank(Ctr) == 4
K, S, E = lqr(Alin, Blin, Q, R)
k = np.array(K[1, :])
print(k)
def control(q):
gs = np.array([pi / 2, 0, 0, 0])
return -k.dot(q - gs)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
done = reward < 2
return reward, done
def do_rollout(args):
x, trial_num = args
th1, th2, dth1, dth2 = x
np.random.seed(trial_num)
local_reward_hist = np.ones((env.num_steps, 1)) * -1
obs = env.reset(init_vec=[th1, th2, dth1, dth2])
for i in range(env.num_steps):
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
obs, reward, done, _ = env.step(actions)
local_reward_hist[i, :] = np.copy(reward)
if done:
break
return local_reward_hist, i
# %%b
start = time.time()
config = {"init_state": [0, 0, 0, 0],
"max_torque": max_torque,
"init_state_weights": [0, 0, 0, 0],
"max_t" : 2.5,
"dt": dt,
"m2": m2,
"m1": m1,
"l1": l1,
"lc1": lc1,
"lc2": lc2,
"i1": I1,
"i2": I2,
"integrator" : integrator,
"reward_fn": reward_fn,
"act_hold": 1
}
env = gym.make('su_acrobot-v0', **config)
num_trials = 200000
reward_hist = np.zeros((num_trials, env.num_steps, 1))
X = np.zeros((num_trials, 4), dtype=np.float32)
Y = np.zeros((num_trials, 1), dtype=np.float32)
th1_min = pi / 2 - .5
th1_max = pi / 2 + .5
th2_min = -1
th2_max = 1
th1dot_min = -5
th1dot_max = 5
th2dot_min = -10
th2dot_max = 10
samples = np.random.random_sample((int(num_trials/2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min - th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
pool = Pool() # defaults to number of available CPU's
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(num_trials/2))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i, :] = samples[i, :]
Y[i] = sum(rews) > env.num_steps*3 - 10
th1_min = 0
th1_max = 2*pi
th2_min = -pi
th2_max = pi
th1dot_min = -10
th1dot_max = 10
th2dot_min = -30
th2dot_max = 30
samples = np.random.random_sample((int(num_trials/2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min - th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(num_trials/2), int(num_trials))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i+int(num_trials/2), :] = samples[i, :]
Y[i+int(num_trials/2)] = sum(rews) > env.num_steps*3 - 5
print(time.time() - start)
# %%
from seagul.nn import MLP, fit_model
import torch
net = MLP(4, 1, 2, 32) # output_activation=torch.nn.Softmax)
Y0 = np.ones((num_trials, 1), dtype=np.float32)
w = 1e-2
class_weight = torch.tensor(Y.shape[0]/sum(Y)*w, dtype=torch.float32)
loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss(pos_weight=class_weight))
#loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss())
# loss_hist = fit_model(net, X, Y, 100, batch_size=2048)
# loss_hist = fit_model(net, X, Y0, 5, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss(pos_weight=class_weight))
plt.close()
plt.plot(loss_hist)
plt.show()
# %%
n_thdot = 1
n_th = 1000
th1_vals = np.linspace(0, 2*pi, n_th)
th2_vals = np.linspace(-pi, pi, n_th)
th1dot_vals = np.linspace(-10, 10, n_th)
th2dot_vals = np.linspace(-30, 30, n_th)
sig = torch.nn.Sigmoid()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
from itertools import product
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
# generate 2 2d grids for the x & y bounds
x, y = np.meshgrid(th1_vals, th2_vals)
z = preds
# x and y are bounds, so z should be the value *inside* those bounds.
# Therefore, remove the last value from the z array.
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('Theta')
ax.set_xlabel('Th1')
ax.set_ylabel('Th2')
# set the limits of the plot to the limits of the data
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([pi/2, 0, th1dot_vals[i], th2dot_vals[j]])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
# generate 2 2d grids for the x & y bounds
x, y = np.meshgrid(th1dot_vals, th2dot_vals)
z = preds
# x and y are bounds, so z should be the value *inside* those bounds.
# Therefore, remove the last value from the z array.
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('DTheta')
ax.set_xlabel('dth1')
ax.set_ylabel('dth2')
# set the limits of the plot to the limits of the data
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
# %%
torch.set_default_dtype(torch.float32)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
return reward, False
def do_rollout(trial_num):
np.random.seed(trial_num)
act_hold = 20
hold_count = 0
obs = env.reset()
local_lqr = False
actions = np.random.randn(1) * 3
local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0]))
local_reward_hist = np.zeros((env.num_steps, 1))
local_gate_hist = np.zeros((env.num_steps, 1))
local_action_hist = np.zeros((env.num_steps, 1))
for i in range(env.num_steps):
obs = np.array(obs, dtype=np.float32)
if sig(net(obs)) > .85:
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
local_lqr = True
local_gate_hist[i] = 1
else:
local_gate_hist[i] = 0
if hold_count == act_hold:
actions = np.random.randn(1) * 3
hold_count = 0
hold_count += 1
obs, reward, done, _ = env.step(actions)
local_action_hist[i, :] = np.copy(actions)
local_state_hist[i, :] = np.copy(obs)
local_reward_hist[i, :] = np.copy(reward)
return local_action_hist, local_state_hist, local_reward_hist, local_gate_hist, local_lqr
config = {"init_state": [-pi / 2, 0, 0, 0],
"max_torque": max_torque,
"init_state_weights": [1, 1, 5, 5],
"dt": dt,
"m2": m2,
"m1": m1,
"l1": l1,
"lc1": lc1,
"lc2": lc2,
"i1": I1,
"i2": I2,
"integrator" : integrator,
"reward_fn": reward_fn,
"act_hold": 1,
"max_t" : 10
}
env = gym.make('su_acrobot-v0', **config)
num_trials = 1000
action_hist = np.zeros((num_trials, env.num_steps, 1))
state_hist = np.zeros((num_trials, env.num_steps, env.observation_space.shape[0]))
reward_hist = np.zeros((num_trials, env.num_steps, 1))
gate_hist = np.zeros((num_trials, env.num_steps, 1))
err_hist = np.zeros((num_trials, 1))
lqr_list = []
success_list = []
act_hold = 20
hold_count = 0
obs = env.reset()
start = time.time()
pool = Pool() # defaults to number of available CPU's
for i, res in enumerate(pool.imap(do_rollout,range(num_trials))):
acts, obs, rews, gate, lqr_on = res
action_hist[i, :, :] = acts
state_hist[i, :, :] = obs
reward_hist[i, :, :] = rews
gate_hist[i, :, :] = gate
err_hist[i] = (np.sqrt(sum(((state_hist[i, -1, :] - np.array([pi / 2, 0, 0, 0])) ** 2))))
if lqr_on:
lqr_list.append(i)
#print(err_hist[i])
#print(reward_hist[i,-1])
if err_hist[i] < 2:
success_list.append(i)
#
# for i in (range(num_trials)):
# res = do_rollout(i)
# acts, obs, rews, gate, lqr_on = res
# action_hist[i, :, :] = acts
# state_hist[i, :, :] = obs
# reward_hist[i, :, :] = rews
# gate_hist[i, :, :] = gate
# err_hist[i] = (np.sqrt(sum(((state_hist[i, -1, :] - np.array([pi / 2, 0, 0, 0])) ** 2))))
# if lqr_on:
# lqr_list.append(i)
# #print(err_hist[i])
# #print(reward_hist[i,-1])
# if err_hist[i] < 2:
# success_list.append(i)
print(len(lqr_list))
print(len(success_list))
print((time.time() - global_start) / 60)
|
normal
|
{
"blob_id": "358d4573ff386d6874d5bb5decfe71c71141bf1c",
"index": 2525,
"step-1": "<mask token>\n\n\ndef control(q):\n gs = np.array([pi / 2, 0, 0, 0])\n return -k.dot(q - gs)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n done = reward < 2\n return reward, done\n\n\ndef do_rollout(args):\n x, trial_num = args\n th1, th2, dth1, dth2 = x\n np.random.seed(trial_num)\n local_reward_hist = np.ones((env.num_steps, 1)) * -1\n obs = env.reset(init_vec=[th1, th2, dth1, dth2])\n for i in range(env.num_steps):\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n obs, reward, done, _ = env.step(actions)\n local_reward_hist[i, :] = np.copy(reward)\n if done:\n break\n return local_reward_hist, i\n\n\n<mask token>\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n return reward, False\n\n\ndef do_rollout(trial_num):\n np.random.seed(trial_num)\n act_hold = 20\n hold_count = 0\n obs = env.reset()\n local_lqr = False\n actions = np.random.randn(1) * 3\n local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])\n )\n local_reward_hist = np.zeros((env.num_steps, 1))\n local_gate_hist = np.zeros((env.num_steps, 1))\n local_action_hist = np.zeros((env.num_steps, 1))\n for i in range(env.num_steps):\n obs = np.array(obs, dtype=np.float32)\n if sig(net(obs)) > 0.85:\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque\n )\n local_lqr = True\n local_gate_hist[i] = 1\n else:\n local_gate_hist[i] = 0\n if hold_count == act_hold:\n actions = np.random.randn(1) * 3\n hold_count = 0\n hold_count += 1\n obs, reward, done, _ = env.step(actions)\n local_action_hist[i, :] = np.copy(actions)\n local_state_hist[i, :] = np.copy(obs)\n local_reward_hist[i, :] = np.copy(reward)\n return (local_action_hist, local_state_hist, local_reward_hist,\n local_gate_hist, local_lqr)\n\n\n<mask token>\n",
"step-2": "<mask token>\nassert np.linalg.matrix_rank(Ctr) == 4\n<mask token>\nprint(k)\n\n\ndef control(q):\n gs = np.array([pi / 2, 0, 0, 0])\n return -k.dot(q - gs)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n done = reward < 2\n return reward, done\n\n\ndef do_rollout(args):\n x, trial_num = args\n th1, th2, dth1, dth2 = x\n np.random.seed(trial_num)\n local_reward_hist = np.ones((env.num_steps, 1)) * -1\n obs = env.reset(init_vec=[th1, th2, dth1, dth2])\n for i in range(env.num_steps):\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n obs, reward, done, _ = env.step(actions)\n local_reward_hist[i, :] = np.copy(reward)\n if done:\n break\n return local_reward_hist, i\n\n\n<mask token>\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\n<mask token>\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i, :] = samples[i, :]\n Y[i] = sum(rews) > env.num_steps * 3 - 10\n<mask token>\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\n<mask token>\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2), int(num_trials))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i + int(num_trials / 2), :] = samples[i, :]\n Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5\nprint(time.time() - start)\n<mask token>\nplt.close()\nplt.plot(loss_hist)\nplt.show()\n<mask token>\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])\n<mask token>\nprint(end - start)\n<mask token>\nax.set_title('Theta')\nax.set_xlabel('Th1')\nax.set_ylabel('Th2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\n<mask token>\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])\n<mask token>\nprint(end - start)\n<mask token>\nax.set_title('DTheta')\nax.set_xlabel('dth1')\nax.set_ylabel('dth2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\ntorch.set_default_dtype(torch.float32)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n return reward, False\n\n\ndef do_rollout(trial_num):\n np.random.seed(trial_num)\n act_hold = 20\n hold_count = 0\n obs = env.reset()\n local_lqr = False\n actions = np.random.randn(1) * 3\n local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])\n )\n local_reward_hist = np.zeros((env.num_steps, 1))\n local_gate_hist = np.zeros((env.num_steps, 1))\n local_action_hist = np.zeros((env.num_steps, 1))\n for i in range(env.num_steps):\n obs = np.array(obs, dtype=np.float32)\n if sig(net(obs)) > 0.85:\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque\n )\n local_lqr = True\n local_gate_hist[i] = 1\n else:\n local_gate_hist[i] = 0\n if hold_count == act_hold:\n actions = np.random.randn(1) * 3\n hold_count = 0\n hold_count += 1\n obs, reward, done, _ = env.step(actions)\n local_action_hist[i, :] = np.copy(actions)\n local_state_hist[i, :] = np.copy(obs)\n local_reward_hist[i, :] = np.copy(reward)\n return (local_action_hist, local_state_hist, 
local_reward_hist,\n local_gate_hist, local_lqr)\n\n\n<mask token>\nfor i, res in enumerate(pool.imap(do_rollout, range(num_trials))):\n acts, obs, rews, gate, lqr_on = res\n action_hist[i, :, :] = acts\n state_hist[i, :, :] = obs\n reward_hist[i, :, :] = rews\n gate_hist[i, :, :] = gate\n err_hist[i] = np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0, \n 0, 0])) ** 2))\n if lqr_on:\n lqr_list.append(i)\n if err_hist[i] < 2:\n success_list.append(i)\nprint(len(lqr_list))\nprint(len(success_list))\nprint((time.time() - global_start) / 60)\n",
"step-3": "<mask token>\nglobal_start = time.time()\nm1 = 1\nm2 = 1\nl1 = 1\nl2 = 2\nlc1 = 0.5\nlc2 = 1\nI1 = 0.083\nI2 = 0.33\ng = 9.8\ndt = 0.01\nmax_torque = 25\nintegrator = euler\nQ = np.identity(4)\nQ[0, 0] = 1\nQ[1, 1] = 1\nQ[2, 2] = 1\nQ[3, 3] = 1\nR = np.identity(2) * 0.01\neval_max_t = 10\nth1 = pi / 2\nth2 = 0\nth1d = 0\nth2d = 0\nTAU = np.array([[0], [1]])\nm11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)\n ) + I1 + I2\nm22 = m2 * lc2 ** 2 + I2\nm12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2\nM = np.array([[m11, m12], [m12, m22]])\nh1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2\n ) * th2d * th1d\nh2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2\nH = np.array([[h1], [h2]])\nphi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)\nphi2 = m2 * lc2 * g * cos(th1 + th2)\nPHI = np.array([[phi1], [phi2]])\nBl = np.linalg.inv(M) @ TAU\nBlin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])\nDPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-\n m2 * lc2 * g, -m2 * lc2 * g]])\nAl = -np.linalg.inv(M) @ DPHI\nAlin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [\n Al[1, 0], Al[1, 1], 0, 0]])\nCtr = ctrb(Alin, Blin)\nassert np.linalg.matrix_rank(Ctr) == 4\nK, S, E = lqr(Alin, Blin, Q, R)\nk = np.array(K[1, :])\nprint(k)\n\n\ndef control(q):\n gs = np.array([pi / 2, 0, 0, 0])\n return -k.dot(q - gs)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n done = reward < 2\n return reward, done\n\n\ndef do_rollout(args):\n x, trial_num = args\n th1, th2, dth1, dth2 = x\n np.random.seed(trial_num)\n local_reward_hist = np.ones((env.num_steps, 1)) * -1\n obs = env.reset(init_vec=[th1, th2, dth1, dth2])\n for i in range(env.num_steps):\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n obs, reward, done, _ = env.step(actions)\n local_reward_hist[i, :] = np.copy(reward)\n if done:\n break\n return local_reward_hist, i\n\n\nstart = time.time()\nconfig = {'init_state': [0, 0, 0, 0], 'max_torque': max_torque,\n 'init_state_weights': [0, 0, 0, 0], 'max_t': 2.5, 'dt': dt, 'm2': m2,\n 'm1': m1, 'l1': l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2,\n 'integrator': integrator, 'reward_fn': reward_fn, 'act_hold': 1}\nenv = gym.make('su_acrobot-v0', **config)\nnum_trials = 200000\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\nX = np.zeros((num_trials, 4), dtype=np.float32)\nY = np.zeros((num_trials, 1), dtype=np.float32)\nth1_min = pi / 2 - 0.5\nth1_max = pi / 2 + 0.5\nth2_min = -1\nth2_max = 1\nth1dot_min = -5\nth1dot_max = 5\nth2dot_min = -10\nth2dot_max = 10\nsamples = np.random.random_sample((int(num_trials / 2), 4))\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\ntotal_steps = 0\npool = Pool()\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i, :] = samples[i, :]\n Y[i] = sum(rews) > env.num_steps * 3 - 10\nth1_min = 0\nth1_max = 2 * pi\nth2_min = -pi\nth2_max = pi\nth1dot_min = -10\nth1dot_max = 10\nth2dot_min = -30\nth2dot_max = 30\nsamples = np.random.random_sample((int(num_trials / 2), 4))\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, 
th2dot_max])\ntotal_steps = 0\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2), int(num_trials))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i + int(num_trials / 2), :] = samples[i, :]\n Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5\nprint(time.time() - start)\n<mask token>\nnet = MLP(4, 1, 2, 32)\nY0 = np.ones((num_trials, 1), dtype=np.float32)\nw = 0.01\nclass_weight = torch.tensor(Y.shape[0] / sum(Y) * w, dtype=torch.float32)\nloss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.\n BCEWithLogitsLoss(pos_weight=class_weight))\nplt.close()\nplt.plot(loss_hist)\nplt.show()\nn_thdot = 1\nn_th = 1000\nth1_vals = np.linspace(0, 2 * pi, n_th)\nth2_vals = np.linspace(-pi, pi, n_th)\nth1dot_vals = np.linspace(-10, 10, n_th)\nth2dot_vals = np.linspace(-30, 30, n_th)\nsig = torch.nn.Sigmoid()\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\n<mask token>\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\nend = time.time()\nprint(end - start)\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\nx, y = np.meshgrid(th1_vals, th2_vals)\nz = preds\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)\nax.set_title('Theta')\nax.set_xlabel('Th1')\nax.set_ylabel('Th2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\nend = time.time()\nprint(end - start)\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\nx, y = np.meshgrid(th1dot_vals, th2dot_vals)\nz = preds\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)\nax.set_title('DTheta')\nax.set_xlabel('dth1')\nax.set_ylabel('dth2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\ntorch.set_default_dtype(torch.float32)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n return reward, False\n\n\ndef do_rollout(trial_num):\n np.random.seed(trial_num)\n act_hold = 20\n hold_count = 0\n obs = env.reset()\n local_lqr = False\n actions = np.random.randn(1) * 3\n local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])\n )\n local_reward_hist = np.zeros((env.num_steps, 1))\n local_gate_hist = np.zeros((env.num_steps, 1))\n local_action_hist = np.zeros((env.num_steps, 1))\n for i in range(env.num_steps):\n obs = np.array(obs, dtype=np.float32)\n if sig(net(obs)) > 0.85:\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque\n )\n local_lqr = True\n local_gate_hist[i] = 1\n else:\n local_gate_hist[i] = 0\n if hold_count == act_hold:\n actions = np.random.randn(1) * 3\n hold_count = 0\n hold_count += 1\n obs, reward, done, _ = env.step(actions)\n local_action_hist[i, :] = np.copy(actions)\n local_state_hist[i, :] = np.copy(obs)\n local_reward_hist[i, :] = np.copy(reward)\n return (local_action_hist, local_state_hist, local_reward_hist,\n local_gate_hist, local_lqr)\n\n\nconfig = {'init_state': [-pi / 2, 0, 0, 0], 'max_torque': max_torque,\n 'init_state_weights': [1, 1, 5, 5], 
'dt': dt, 'm2': m2, 'm1': m1, 'l1':\n l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2, 'integrator':\n integrator, 'reward_fn': reward_fn, 'act_hold': 1, 'max_t': 10}\nenv = gym.make('su_acrobot-v0', **config)\nnum_trials = 1000\naction_hist = np.zeros((num_trials, env.num_steps, 1))\nstate_hist = np.zeros((num_trials, env.num_steps, env.observation_space.\n shape[0]))\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\ngate_hist = np.zeros((num_trials, env.num_steps, 1))\nerr_hist = np.zeros((num_trials, 1))\nlqr_list = []\nsuccess_list = []\nact_hold = 20\nhold_count = 0\nobs = env.reset()\nstart = time.time()\npool = Pool()\nfor i, res in enumerate(pool.imap(do_rollout, range(num_trials))):\n acts, obs, rews, gate, lqr_on = res\n action_hist[i, :, :] = acts\n state_hist[i, :, :] = obs\n reward_hist[i, :, :] = rews\n gate_hist[i, :, :] = gate\n err_hist[i] = np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0, \n 0, 0])) ** 2))\n if lqr_on:\n lqr_list.append(i)\n if err_hist[i] < 2:\n success_list.append(i)\nprint(len(lqr_list))\nprint(len(success_list))\nprint((time.time() - global_start) / 60)\n",
"step-4": "import numpy as np\nfrom numpy import sin, cos, pi\nimport gym\nimport seagul.envs\nfrom seagul.integration import rk4, euler\nfrom control import lqr, ctrb\nfrom torch.multiprocessing import Pool\nimport matplotlib.pyplot as plt\nimport matplotlib\nimport time\nglobal_start = time.time()\nm1 = 1\nm2 = 1\nl1 = 1\nl2 = 2\nlc1 = 0.5\nlc2 = 1\nI1 = 0.083\nI2 = 0.33\ng = 9.8\ndt = 0.01\nmax_torque = 25\nintegrator = euler\nQ = np.identity(4)\nQ[0, 0] = 1\nQ[1, 1] = 1\nQ[2, 2] = 1\nQ[3, 3] = 1\nR = np.identity(2) * 0.01\neval_max_t = 10\nth1 = pi / 2\nth2 = 0\nth1d = 0\nth2d = 0\nTAU = np.array([[0], [1]])\nm11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)\n ) + I1 + I2\nm22 = m2 * lc2 ** 2 + I2\nm12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2\nM = np.array([[m11, m12], [m12, m22]])\nh1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2\n ) * th2d * th1d\nh2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2\nH = np.array([[h1], [h2]])\nphi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)\nphi2 = m2 * lc2 * g * cos(th1 + th2)\nPHI = np.array([[phi1], [phi2]])\nBl = np.linalg.inv(M) @ TAU\nBlin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])\nDPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-\n m2 * lc2 * g, -m2 * lc2 * g]])\nAl = -np.linalg.inv(M) @ DPHI\nAlin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [\n Al[1, 0], Al[1, 1], 0, 0]])\nCtr = ctrb(Alin, Blin)\nassert np.linalg.matrix_rank(Ctr) == 4\nK, S, E = lqr(Alin, Blin, Q, R)\nk = np.array(K[1, :])\nprint(k)\n\n\ndef control(q):\n gs = np.array([pi / 2, 0, 0, 0])\n return -k.dot(q - gs)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n done = reward < 2\n return reward, done\n\n\ndef do_rollout(args):\n x, trial_num = args\n th1, th2, dth1, dth2 = x\n np.random.seed(trial_num)\n local_reward_hist = np.ones((env.num_steps, 1)) * -1\n obs = env.reset(init_vec=[th1, th2, dth1, dth2])\n for i in range(env.num_steps):\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n obs, reward, done, _ = env.step(actions)\n local_reward_hist[i, :] = np.copy(reward)\n if done:\n break\n return local_reward_hist, i\n\n\nstart = time.time()\nconfig = {'init_state': [0, 0, 0, 0], 'max_torque': max_torque,\n 'init_state_weights': [0, 0, 0, 0], 'max_t': 2.5, 'dt': dt, 'm2': m2,\n 'm1': m1, 'l1': l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2,\n 'integrator': integrator, 'reward_fn': reward_fn, 'act_hold': 1}\nenv = gym.make('su_acrobot-v0', **config)\nnum_trials = 200000\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\nX = np.zeros((num_trials, 4), dtype=np.float32)\nY = np.zeros((num_trials, 1), dtype=np.float32)\nth1_min = pi / 2 - 0.5\nth1_max = pi / 2 + 0.5\nth2_min = -1\nth2_max = 1\nth1dot_min = -5\nth1dot_max = 5\nth2dot_min = -10\nth2dot_max = 10\nsamples = np.random.random_sample((int(num_trials / 2), 4))\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\ntotal_steps = 0\npool = Pool()\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i, :] = samples[i, :]\n Y[i] = sum(rews) > env.num_steps * 3 - 10\nth1_min = 0\nth1_max = 2 * pi\nth2_min = -pi\nth2_max = pi\nth1dot_min = -10\nth1dot_max = 10\nth2dot_min = -30\nth2dot_max = 
30\nsamples = np.random.random_sample((int(num_trials / 2), 4))\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\ntotal_steps = 0\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2), int(num_trials))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i + int(num_trials / 2), :] = samples[i, :]\n Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5\nprint(time.time() - start)\nfrom seagul.nn import MLP, fit_model\nimport torch\nnet = MLP(4, 1, 2, 32)\nY0 = np.ones((num_trials, 1), dtype=np.float32)\nw = 0.01\nclass_weight = torch.tensor(Y.shape[0] / sum(Y) * w, dtype=torch.float32)\nloss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.\n BCEWithLogitsLoss(pos_weight=class_weight))\nplt.close()\nplt.plot(loss_hist)\nplt.show()\nn_thdot = 1\nn_th = 1000\nth1_vals = np.linspace(0, 2 * pi, n_th)\nth2_vals = np.linspace(-pi, pi, n_th)\nth1dot_vals = np.linspace(-10, 10, n_th)\nth2dot_vals = np.linspace(-30, 30, n_th)\nsig = torch.nn.Sigmoid()\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\nfrom itertools import product\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\nend = time.time()\nprint(end - start)\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\nx, y = np.meshgrid(th1_vals, th2_vals)\nz = preds\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)\nax.set_title('Theta')\nax.set_xlabel('Th1')\nax.set_ylabel('Th2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\nend = time.time()\nprint(end - start)\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\nx, y = np.meshgrid(th1dot_vals, th2dot_vals)\nz = preds\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)\nax.set_title('DTheta')\nax.set_xlabel('dth1')\nax.set_ylabel('dth2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\ntorch.set_default_dtype(torch.float32)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n return reward, False\n\n\ndef do_rollout(trial_num):\n np.random.seed(trial_num)\n act_hold = 20\n hold_count = 0\n obs = env.reset()\n local_lqr = False\n actions = np.random.randn(1) * 3\n local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])\n )\n local_reward_hist = np.zeros((env.num_steps, 1))\n local_gate_hist = np.zeros((env.num_steps, 1))\n local_action_hist = np.zeros((env.num_steps, 1))\n for i in range(env.num_steps):\n obs = np.array(obs, dtype=np.float32)\n if sig(net(obs)) > 0.85:\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque\n )\n local_lqr = True\n local_gate_hist[i] = 1\n else:\n local_gate_hist[i] = 0\n if hold_count == act_hold:\n actions = np.random.randn(1) * 3\n hold_count = 0\n hold_count += 1\n obs, reward, done, _ = env.step(actions)\n local_action_hist[i, :] = np.copy(actions)\n 
local_state_hist[i, :] = np.copy(obs)\n local_reward_hist[i, :] = np.copy(reward)\n return (local_action_hist, local_state_hist, local_reward_hist,\n local_gate_hist, local_lqr)\n\n\nconfig = {'init_state': [-pi / 2, 0, 0, 0], 'max_torque': max_torque,\n 'init_state_weights': [1, 1, 5, 5], 'dt': dt, 'm2': m2, 'm1': m1, 'l1':\n l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2, 'integrator':\n integrator, 'reward_fn': reward_fn, 'act_hold': 1, 'max_t': 10}\nenv = gym.make('su_acrobot-v0', **config)\nnum_trials = 1000\naction_hist = np.zeros((num_trials, env.num_steps, 1))\nstate_hist = np.zeros((num_trials, env.num_steps, env.observation_space.\n shape[0]))\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\ngate_hist = np.zeros((num_trials, env.num_steps, 1))\nerr_hist = np.zeros((num_trials, 1))\nlqr_list = []\nsuccess_list = []\nact_hold = 20\nhold_count = 0\nobs = env.reset()\nstart = time.time()\npool = Pool()\nfor i, res in enumerate(pool.imap(do_rollout, range(num_trials))):\n acts, obs, rews, gate, lqr_on = res\n action_hist[i, :, :] = acts\n state_hist[i, :, :] = obs\n reward_hist[i, :, :] = rews\n gate_hist[i, :, :] = gate\n err_hist[i] = np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0, \n 0, 0])) ** 2))\n if lqr_on:\n lqr_list.append(i)\n if err_hist[i] < 2:\n success_list.append(i)\nprint(len(lqr_list))\nprint(len(success_list))\nprint((time.time() - global_start) / 60)\n",
"step-5": "# %%\nimport numpy as np\nfrom numpy import sin, cos, pi\nimport gym\nimport seagul.envs\n\nfrom seagul.integration import rk4,euler\nfrom control import lqr, ctrb\nfrom torch.multiprocessing import Pool\nimport matplotlib.pyplot as plt\nimport matplotlib\n\n#matplotlib.use('Qt5Agg')\n\nimport time\n\nglobal_start = time.time()\n\n# %%\nm1 = 1\nm2 = 1\nl1 = 1\nl2 = 2\nlc1 = .5\nlc2 = 1\nI1 = .083\nI2 = .33\ng = 9.8\n\n#\n# m1 = 1\n# m2 = 1\n# l1 = 1\n# l2 = 1\n# lc1 = .5\n# lc2 = .5\n# I1 = .2\n# I2 = 1.0\n# g = 9.8\n\n\ndt = .01\nmax_torque = 25\nintegrator = euler\n\nQ = np.identity(4)\nQ[0, 0] = 1\nQ[1, 1] = 1\nQ[2, 2] = 1\nQ[3, 3] = 1\n#\n# Q = np.array([[1000, -500, 0,0],[-500, 1000, 0, 0],[0, 0, 1000, -500],[0,0,-500,1000]])\nR = np.identity(2) * .01\n\neval_max_t = 10\n\n\nth1 = pi / 2\nth2 = 0\nth1d = 0\nth2d = 0\n\nTAU = np.array([[0], [1]])\n\nm11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)) + I1 + I2\nm22 = m2 * lc2 ** 2 + I2\nm12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2\nM = np.array([[m11, m12], [m12, m22]])\n\nh1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2) * th2d * th1d\nh2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2\nH = np.array([[h1], [h2]])\n\nphi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)\nphi2 = m2 * lc2 * g * cos(th1 + th2)\nPHI = np.array([[phi1], [phi2]])\n\nBl = np.linalg.inv(M) @ TAU\nBlin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])\n\nDPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-m2 * lc2 * g, -m2 * lc2 * g]])\nAl = -np.linalg.inv(M) @ DPHI\nAlin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [Al[1, 0], Al[1, 1], 0, 0]])\n\nCtr = ctrb(Alin, Blin)\nassert np.linalg.matrix_rank(Ctr) == 4\n\nK, S, E = lqr(Alin, Blin, Q, R)\nk = np.array(K[1, :])\nprint(k)\n\n\ndef control(q):\n gs = np.array([pi / 2, 0, 0, 0])\n return -k.dot(q - gs)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n done = reward < 2\n return reward, done\n\n\ndef do_rollout(args):\n x, trial_num = args\n th1, th2, dth1, dth2 = x\n np.random.seed(trial_num)\n local_reward_hist = np.ones((env.num_steps, 1)) * -1\n obs = env.reset(init_vec=[th1, th2, dth1, dth2])\n\n for i in range(env.num_steps):\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n obs, reward, done, _ = env.step(actions)\n local_reward_hist[i, :] = np.copy(reward)\n if done:\n break\n\n return local_reward_hist, i\n\n\n# %%b\nstart = time.time()\nconfig = {\"init_state\": [0, 0, 0, 0],\n \"max_torque\": max_torque,\n \"init_state_weights\": [0, 0, 0, 0],\n \"max_t\" : 2.5,\n \"dt\": dt,\n \"m2\": m2,\n \"m1\": m1,\n \"l1\": l1,\n \"lc1\": lc1,\n \"lc2\": lc2,\n \"i1\": I1,\n \"i2\": I2,\n \"integrator\" : integrator,\n \"reward_fn\": reward_fn,\n \"act_hold\": 1\n }\nenv = gym.make('su_acrobot-v0', **config)\n\nnum_trials = 200000\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\n\nX = np.zeros((num_trials, 4), dtype=np.float32)\nY = np.zeros((num_trials, 1), dtype=np.float32)\n\nth1_min = pi / 2 - .5\nth1_max = pi / 2 + .5\nth2_min = -1\nth2_max = 1\nth1dot_min = -5\nth1dot_max = 5\nth2dot_min = -10\nth2dot_max = 10\n\nsamples = np.random.random_sample((int(num_trials/2), 4))\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min - th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\n\ntotal_steps = 0\npool = Pool() # defaults to number of available 
CPU's\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(num_trials/2))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i, :] = samples[i, :]\n Y[i] = sum(rews) > env.num_steps*3 - 10\n\n\nth1_min = 0\nth1_max = 2*pi\nth2_min = -pi\nth2_max = pi\nth1dot_min = -10\nth1dot_max = 10\nth2dot_min = -30\nth2dot_max = 30\n\nsamples = np.random.random_sample((int(num_trials/2), 4))\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min - th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\ntotal_steps = 0\n\n\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(num_trials/2), int(num_trials))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i+int(num_trials/2), :] = samples[i, :]\n Y[i+int(num_trials/2)] = sum(rews) > env.num_steps*3 - 5\n\n\nprint(time.time() - start)\n\n# %%\nfrom seagul.nn import MLP, fit_model\nimport torch\n\nnet = MLP(4, 1, 2, 32) # output_activation=torch.nn.Softmax)\nY0 = np.ones((num_trials, 1), dtype=np.float32)\n\nw = 1e-2\nclass_weight = torch.tensor(Y.shape[0]/sum(Y)*w, dtype=torch.float32)\n\nloss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss(pos_weight=class_weight))\n#loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss())\n\n# loss_hist = fit_model(net, X, Y, 100, batch_size=2048)\n# loss_hist = fit_model(net, X, Y0, 5, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss(pos_weight=class_weight))\n\nplt.close()\nplt.plot(loss_hist)\nplt.show()\n\n# %%\n\n\nn_thdot = 1\nn_th = 1000\n\nth1_vals = np.linspace(0, 2*pi, n_th)\nth2_vals = np.linspace(-pi, pi, n_th)\n\nth1dot_vals = np.linspace(-10, 10, n_th)\nth2dot_vals = np.linspace(-30, 30, n_th)\n\nsig = torch.nn.Sigmoid()\n\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\n\nfrom itertools import product\n\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])\n\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\n\nend = time.time()\n\nprint(end - start)\n\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\n# generate 2 2d grids for the x & y bounds\nx, y = np.meshgrid(th1_vals, th2_vals)\nz = preds\n\n# x and y are bounds, so z should be the value *inside* those bounds.\n# Therefore, remove the last value from the z array.\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\n\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)\nax.set_title('Theta')\nax.set_xlabel('Th1')\nax.set_ylabel('Th2')\n\n# set the limits of the plot to the limits of the data\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\n\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\n\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([pi/2, 0, th1dot_vals[i], th2dot_vals[j]])\n\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\nend = time.time()\n\nprint(end - start)\n\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\n# generate 2 2d grids for the x & y bounds\nx, y = np.meshgrid(th1dot_vals, th2dot_vals)\nz = preds\n\n# x and y are bounds, so z should be the value *inside* those bounds.\n# Therefore, remove the last value from the z array.\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\n\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, 
vmax=z_max)\nax.set_title('DTheta')\nax.set_xlabel('dth1')\nax.set_ylabel('dth2')\n# set the limits of the plot to the limits of the data\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\n\n\n# %%\n\ntorch.set_default_dtype(torch.float32)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n return reward, False\n\n\ndef do_rollout(trial_num):\n np.random.seed(trial_num)\n act_hold = 20\n hold_count = 0\n\n obs = env.reset()\n local_lqr = False\n\n actions = np.random.randn(1) * 3\n\n local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0]))\n local_reward_hist = np.zeros((env.num_steps, 1))\n local_gate_hist = np.zeros((env.num_steps, 1))\n local_action_hist = np.zeros((env.num_steps, 1))\n\n for i in range(env.num_steps):\n obs = np.array(obs, dtype=np.float32)\n if sig(net(obs)) > .85:\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n local_lqr = True\n local_gate_hist[i] = 1\n else:\n local_gate_hist[i] = 0\n if hold_count == act_hold:\n actions = np.random.randn(1) * 3\n hold_count = 0\n\n hold_count += 1\n obs, reward, done, _ = env.step(actions)\n local_action_hist[i, :] = np.copy(actions)\n local_state_hist[i, :] = np.copy(obs)\n local_reward_hist[i, :] = np.copy(reward)\n\n return local_action_hist, local_state_hist, local_reward_hist, local_gate_hist, local_lqr\n\n\nconfig = {\"init_state\": [-pi / 2, 0, 0, 0],\n \"max_torque\": max_torque,\n \"init_state_weights\": [1, 1, 5, 5],\n \"dt\": dt,\n \"m2\": m2,\n \"m1\": m1,\n \"l1\": l1,\n \"lc1\": lc1,\n \"lc2\": lc2,\n \"i1\": I1,\n \"i2\": I2,\n \"integrator\" : integrator,\n \"reward_fn\": reward_fn,\n \"act_hold\": 1,\n \"max_t\" : 10\n }\n\nenv = gym.make('su_acrobot-v0', **config)\n\nnum_trials = 1000\naction_hist = np.zeros((num_trials, env.num_steps, 1))\nstate_hist = np.zeros((num_trials, env.num_steps, env.observation_space.shape[0]))\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\ngate_hist = np.zeros((num_trials, env.num_steps, 1))\nerr_hist = np.zeros((num_trials, 1))\n\nlqr_list = []\nsuccess_list = []\n\nact_hold = 20\nhold_count = 0\nobs = env.reset()\n\nstart = time.time()\n\npool = Pool() # defaults to number of available CPU's\nfor i, res in enumerate(pool.imap(do_rollout,range(num_trials))):\n acts, obs, rews, gate, lqr_on = res\n action_hist[i, :, :] = acts\n state_hist[i, :, :] = obs\n reward_hist[i, :, :] = rews\n gate_hist[i, :, :] = gate\n err_hist[i] = (np.sqrt(sum(((state_hist[i, -1, :] - np.array([pi / 2, 0, 0, 0])) ** 2))))\n if lqr_on:\n lqr_list.append(i)\n #print(err_hist[i])\n #print(reward_hist[i,-1])\n if err_hist[i] < 2:\n success_list.append(i)\n\n#\n# for i in (range(num_trials)):\n# res = do_rollout(i)\n# acts, obs, rews, gate, lqr_on = res\n# action_hist[i, :, :] = acts\n# state_hist[i, :, :] = obs\n# reward_hist[i, :, :] = rews\n# gate_hist[i, :, :] = gate\n# err_hist[i] = (np.sqrt(sum(((state_hist[i, -1, :] - np.array([pi / 2, 0, 0, 0])) ** 2))))\n# if lqr_on:\n# lqr_list.append(i)\n# #print(err_hist[i])\n# #print(reward_hist[i,-1])\n# if err_hist[i] < 2:\n# success_list.append(i)\n\n\nprint(len(lqr_list))\nprint(len(success_list))\n\nprint((time.time() - global_start) / 60)\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in range(1, n - 1):
rem = n % i
if rem == 0:
sum = sum + i
if sum == n:
print('the number is perfect')
else:
print('not prime')
<|reserved_special_token_1|>
n = int(input('enter the number\n'))
sum = 0
for i in range(1, n - 1):
rem = n % i
if rem == 0:
sum = sum + i
if sum == n:
print('the number is perfect')
else:
print('not prime')
<|reserved_special_token_1|>
n=int(input("enter the number\n"))
sum=0
for i in range(1,n-1):
rem=n%i
if(rem==0):
sum=sum+i
if(sum==n):
print("the number is perfect")
else:
print("not prime")
|
flexible
|
{
"blob_id": "5721786b61cf8706b1d401a46d06f2d32153df8b",
"index": 765,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(1, n - 1):\n rem = n % i\n if rem == 0:\n sum = sum + i\nif sum == n:\n print('the number is perfect')\nelse:\n print('not prime')\n",
"step-3": "n = int(input('enter the number\\n'))\nsum = 0\nfor i in range(1, n - 1):\n rem = n % i\n if rem == 0:\n sum = sum + i\nif sum == n:\n print('the number is perfect')\nelse:\n print('not prime')\n",
"step-4": "n=int(input(\"enter the number\\n\"))\nsum=0\nfor i in range(1,n-1):\n rem=n%i\n if(rem==0):\n sum=sum+i\nif(sum==n):\n print(\"the number is perfect\")\nelse:\n print(\"not prime\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: UTF-8 -*-
from keywords.httpkeys1 import HTTP
http1 = HTTP()
# ip = '10.68.170.184:8080'
ip = '10.68.170.184:8080'
http1.post('http://'+ip+'/music_download/api/login','username=admin&password=123456')
# http1.savejson('result','id')
# http1.get('http://47.101.197.102:8080/music/api/user','{id}')
# data = {'username':'admin','password':'123456'}
# # pass the data in JSON format
# http1.postjson('http://47.101.197.102:8080/music/api/login',data=data)
# http1.savejson('result','id')
# http1.get('http://47.101.197.102:8080/music/api/user','{id}')
# http1.addheader('Content-type','multipart/form-data')
http1.upload('http://'+ip+'/music_download/api/song/upload','speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file=G:\\music_data\\1.mp3')
# http1.upload('http://10.68.170.184:8080/music/api/song/upload','filename=1.mp3&speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file1=G:/music_data/1.mp3')
|
normal
|
{
"blob_id": "68e09f72e8338efbef108ffd0c93eff067bf7b07",
"index": 135,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nhttp1.post('http://' + ip + '/music_download/api/login',\n 'username=admin&password=123456')\nhttp1.upload('http://' + ip + '/music_download/api/song/upload',\n 'speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file=G:\\\\music_data\\\\1.mp3'\n )\n",
"step-3": "<mask token>\nhttp1 = HTTP()\nip = '10.68.170.184:8080'\nhttp1.post('http://' + ip + '/music_download/api/login',\n 'username=admin&password=123456')\nhttp1.upload('http://' + ip + '/music_download/api/song/upload',\n 'speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file=G:\\\\music_data\\\\1.mp3'\n )\n",
"step-4": "from keywords.httpkeys1 import HTTP\nhttp1 = HTTP()\nip = '10.68.170.184:8080'\nhttp1.post('http://' + ip + '/music_download/api/login',\n 'username=admin&password=123456')\nhttp1.upload('http://' + ip + '/music_download/api/song/upload',\n 'speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file=G:\\\\music_data\\\\1.mp3'\n )\n",
"step-5": "# -*- coding: UTF-8 -*-\nfrom keywords.httpkeys1 import HTTP\n\nhttp1 = HTTP()\n\n# ip = '10.68.170.184:8080'\nip = '10.68.170.184:8080'\n\nhttp1.post('http://'+ip+'/music_download/api/login','username=admin&password=123456')\n# http1.savejson('result','id')\n# http1.get('http://47.101.197.102:8080/music/api/user','{id}')\n\n# data = {'username':'admin','password':'123456'}\n# # json方式传递数据\n# http1.postjson('http://47.101.197.102:8080/music/api/login',data=data)\n# http1.savejson('result','id')\n# http1.get('http://47.101.197.102:8080/music/api/user','{id}')\n\n# http1.addheader('Content-type','multipart/form-data')\n\nhttp1.upload('http://'+ip+'/music_download/api/song/upload','speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file=G:\\\\music_data\\\\1.mp3')\n\n# http1.upload('http://10.68.170.184:8080/music/api/song/upload','filename=1.mp3&speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file1=G:/music_data/1.mp3')\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def partition(data, l, h):
i = l
j = h
pivot = data[l]
while i < j:
while data[i] <= pivot and i <= h - 1:
i = i + 1
while data[j] > pivot and j >= l + 1:
j = j - 1
if i < j:
data[i], data[j] = data[j], data[i]
data[l], data[j] = data[j], data[l]
return j
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def partition(data, l, h):
i = l
j = h
pivot = data[l]
while i < j:
while data[i] <= pivot and i <= h - 1:
i = i + 1
while data[j] > pivot and j >= l + 1:
j = j - 1
if i < j:
data[i], data[j] = data[j], data[i]
data[l], data[j] = data[j], data[l]
return j
def quickSort(data, l, h):
if l < h:
divider = partition(data, l, h)
quickSort(data, l, divider - 1)
quickSort(data, divider + 1, h)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def partition(data, l, h):
i = l
j = h
pivot = data[l]
while i < j:
while data[i] <= pivot and i <= h - 1:
i = i + 1
while data[j] > pivot and j >= l + 1:
j = j - 1
if i < j:
data[i], data[j] = data[j], data[i]
data[l], data[j] = data[j], data[l]
return j
def quickSort(data, l, h):
if l < h:
divider = partition(data, l, h)
quickSort(data, l, divider - 1)
quickSort(data, divider + 1, h)
quickSort(data, 0, len(data) - 1)
print(data)
<|reserved_special_token_1|>
data = [1, 4, 2, 3, 6, 8, 9, 7]
def partition(data, l, h):
i = l
j = h
pivot = data[l]
while i < j:
while data[i] <= pivot and i <= h - 1:
i = i + 1
while data[j] > pivot and j >= l + 1:
j = j - 1
if i < j:
data[i], data[j] = data[j], data[i]
data[l], data[j] = data[j], data[l]
return j
def quickSort(data, l, h):
if l < h:
divider = partition(data, l, h)
quickSort(data, l, divider - 1)
quickSort(data, divider + 1, h)
quickSort(data, 0, len(data) - 1)
print(data)
<|reserved_special_token_1|>
data=[1,4,2,3,6,8,9,7]
def partition(data,l,h):
i=l
j=h
pivot=data[l]
while(i<j):
while(data[i]<=pivot and i<=h-1):
i=i+1
while(data[j]>pivot and j>=l+1):
j=j-1
if(i<j):
data[i],data[j]=data[j],data[i]
data[l],data[j]=data[j],data[l]
return j
def quickSort(data,l,h):
if(l<h):
divider=partition(data,l,h)
quickSort(data,l,divider-1)
quickSort(data,divider+1,h)
quickSort(data,0,len(data)-1)
print(data)
|
flexible
|
{
"blob_id": "1cd82883e9a73cfbe067d58c30659b9b2e5bf473",
"index": 9349,
"step-1": "<mask token>\n\n\ndef partition(data, l, h):\n i = l\n j = h\n pivot = data[l]\n while i < j:\n while data[i] <= pivot and i <= h - 1:\n i = i + 1\n while data[j] > pivot and j >= l + 1:\n j = j - 1\n if i < j:\n data[i], data[j] = data[j], data[i]\n data[l], data[j] = data[j], data[l]\n return j\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef partition(data, l, h):\n i = l\n j = h\n pivot = data[l]\n while i < j:\n while data[i] <= pivot and i <= h - 1:\n i = i + 1\n while data[j] > pivot and j >= l + 1:\n j = j - 1\n if i < j:\n data[i], data[j] = data[j], data[i]\n data[l], data[j] = data[j], data[l]\n return j\n\n\ndef quickSort(data, l, h):\n if l < h:\n divider = partition(data, l, h)\n quickSort(data, l, divider - 1)\n quickSort(data, divider + 1, h)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef partition(data, l, h):\n i = l\n j = h\n pivot = data[l]\n while i < j:\n while data[i] <= pivot and i <= h - 1:\n i = i + 1\n while data[j] > pivot and j >= l + 1:\n j = j - 1\n if i < j:\n data[i], data[j] = data[j], data[i]\n data[l], data[j] = data[j], data[l]\n return j\n\n\ndef quickSort(data, l, h):\n if l < h:\n divider = partition(data, l, h)\n quickSort(data, l, divider - 1)\n quickSort(data, divider + 1, h)\n\n\nquickSort(data, 0, len(data) - 1)\nprint(data)\n",
"step-4": "data = [1, 4, 2, 3, 6, 8, 9, 7]\n\n\ndef partition(data, l, h):\n i = l\n j = h\n pivot = data[l]\n while i < j:\n while data[i] <= pivot and i <= h - 1:\n i = i + 1\n while data[j] > pivot and j >= l + 1:\n j = j - 1\n if i < j:\n data[i], data[j] = data[j], data[i]\n data[l], data[j] = data[j], data[l]\n return j\n\n\ndef quickSort(data, l, h):\n if l < h:\n divider = partition(data, l, h)\n quickSort(data, l, divider - 1)\n quickSort(data, divider + 1, h)\n\n\nquickSort(data, 0, len(data) - 1)\nprint(data)\n",
"step-5": "data=[1,4,2,3,6,8,9,7]\r\n\r\ndef partition(data,l,h):\r\n i=l\r\n j=h\r\n pivot=data[l]\r\n\r\n while(i<j):\r\n while(data[i]<=pivot and i<=h-1):\r\n i=i+1\r\n \r\n\r\n while(data[j]>pivot and j>=l+1):\r\n\r\n j=j-1\r\n\r\n \r\n \r\n if(i<j):\r\n data[i],data[j]=data[j],data[i]\r\n\r\n\r\n data[l],data[j]=data[j],data[l]\r\n return j\r\n\r\ndef quickSort(data,l,h):\r\n if(l<h):\r\n divider=partition(data,l,h)\r\n quickSort(data,l,divider-1)\r\n quickSort(data,divider+1,h)\r\n\r\n \r\n\r\n\r\nquickSort(data,0,len(data)-1)\r\nprint(data)\r\n \r\n \r\n\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def exponent(base, index):
if index == 0 and base == 0:
return -1
elif index == 0:
return 1
elif base == 0:
return 0
else:
product = 1
for indices in range(index):
product *= base
return product
def factorial(num):
if num == 0:
return 1
else:
fact = 1
index = 1
while index <= num:
fact *= index
index = index + 1
return fact
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def exponent(base, index):
if index == 0 and base == 0:
return -1
elif index == 0:
return 1
elif base == 0:
return 0
else:
product = 1
for indices in range(index):
product *= base
return product
def factorial(num):
if num == 0:
return 1
else:
fact = 1
index = 1
while index <= num:
fact *= index
index = index + 1
return fact
<|reserved_special_token_0|>
while index <= num:
if index % 2 == 1:
sin = sin + exponent(radians, odd) / factorial(odd)
if index % 2 == 0:
sin = sin - exponent(radians, odd) / factorial(odd)
index += 1
odd += 2
print('The value of sin for the given radians is :', sin)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def exponent(base, index):
if index == 0 and base == 0:
return -1
elif index == 0:
return 1
elif base == 0:
return 0
else:
product = 1
for indices in range(index):
product *= base
return product
def factorial(num):
if num == 0:
return 1
else:
fact = 1
index = 1
while index <= num:
fact *= index
index = index + 1
return fact
radians = 3 * 3.14159 / 2
num = 15
sin = 0
index = 1
odd = 1
while index <= num:
if index % 2 == 1:
sin = sin + exponent(radians, odd) / factorial(odd)
if index % 2 == 0:
sin = sin - exponent(radians, odd) / factorial(odd)
index += 1
odd += 2
print('The value of sin for the given radians is :', sin)
<|reserved_special_token_1|>
'''
sin(x) = x^1/1! - x^3/3! + x^5/5! - x^7/7! + …..
Input : x, n ( No. of terms I want in series )
Input : 3.14, 10
Output : sin(3.14) = sin(180) = 0
Radians vs Degrees
( 0, 30, 60, 90 ….)
2pi = 360
Pi = 180
Pseudo code :
1.Take input variables radians,num
2. sin = 0
3. Indices = 1
4. odd = 1
4. Iterate indices from 1 to num with condition index <= num
If index%2 == 1
sin = sin + exponent(radians,odd)/factorial(odd)
If index%2 == 0
sin = sin - exponent(radians,odd)/factorial(odd)
Index += 1
odd += 2
5 . print the value of th sin
'''
def exponent(base,index):
if(index == 0 and base == 0):
return -1
elif(index == 0):
return 1
elif(base == 0):
return 0
else:
product = 1
for indices in range(index):
product *= base
return product
def factorial(num):
if(num == 0):
return 1
else:
fact = 1
index =1
while(index <= num):
fact *= index
index = index+1
return fact
radians = 3*3.14159/2
num = 15
sin = 0
index = 1
odd = 1
while(index <= num):
if(index%2 == 1):
sin = sin + (exponent(radians,odd)/factorial(odd))
if(index%2 == 0):
sin = sin - (exponent(radians,odd)/factorial(odd))
index += 1
odd += 2
print("The value of sin for the given radians is :",sin)
|
flexible
|
{
"blob_id": "a99426c0751885f17078e709fd523cf3a26f5286",
"index": 5533,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef exponent(base, index):\n if index == 0 and base == 0:\n return -1\n elif index == 0:\n return 1\n elif base == 0:\n return 0\n else:\n product = 1\n for indices in range(index):\n product *= base\n return product\n\n\ndef factorial(num):\n if num == 0:\n return 1\n else:\n fact = 1\n index = 1\n while index <= num:\n fact *= index\n index = index + 1\n return fact\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef exponent(base, index):\n if index == 0 and base == 0:\n return -1\n elif index == 0:\n return 1\n elif base == 0:\n return 0\n else:\n product = 1\n for indices in range(index):\n product *= base\n return product\n\n\ndef factorial(num):\n if num == 0:\n return 1\n else:\n fact = 1\n index = 1\n while index <= num:\n fact *= index\n index = index + 1\n return fact\n\n\n<mask token>\nwhile index <= num:\n if index % 2 == 1:\n sin = sin + exponent(radians, odd) / factorial(odd)\n if index % 2 == 0:\n sin = sin - exponent(radians, odd) / factorial(odd)\n index += 1\n odd += 2\nprint('The value of sin for the given radians is :', sin)\n",
"step-4": "<mask token>\n\n\ndef exponent(base, index):\n if index == 0 and base == 0:\n return -1\n elif index == 0:\n return 1\n elif base == 0:\n return 0\n else:\n product = 1\n for indices in range(index):\n product *= base\n return product\n\n\ndef factorial(num):\n if num == 0:\n return 1\n else:\n fact = 1\n index = 1\n while index <= num:\n fact *= index\n index = index + 1\n return fact\n\n\nradians = 3 * 3.14159 / 2\nnum = 15\nsin = 0\nindex = 1\nodd = 1\nwhile index <= num:\n if index % 2 == 1:\n sin = sin + exponent(radians, odd) / factorial(odd)\n if index % 2 == 0:\n sin = sin - exponent(radians, odd) / factorial(odd)\n index += 1\n odd += 2\nprint('The value of sin for the given radians is :', sin)\n",
"step-5": "'''\nsin(x) = x^1/1! - x^3/3! + x^5/5! - x^7/7! + …..\n\nInput : x, n ( No. of terms I want in series )\n\nInput : 3.14, 10\n\nOutput : sin(3.14) = sin(180) = 0\n\nRadians vs Degrees\n\n\n( 0, 30, 60, 90 ….)\n2pi = 360\nPi = 180\n\n\nPseudo code :\n1.Take input variables radians,num\n2. sin = 0\n3. Indices = 1\n4. odd = 1\n4. Iterate indices from 1 to num with condition index <= num\n\tIf index%2 == 1\n sin = sin + exponent(radians,odd)/factorial(odd)\n If index%2 == 0\n\tsin = sin - exponent(radians,odd)/factorial(odd)\n Index += 1\n odd += 2\n5 . print the value of th sin\n\n'''\ndef exponent(base,index):\n if(index == 0 and base == 0):\n return -1\n elif(index == 0):\n return 1\n elif(base == 0):\n return 0\n else:\n product = 1\n for indices in range(index):\n product *= base\n return product\n\ndef factorial(num):\n if(num == 0):\n return 1\n else:\n fact = 1\n index =1\n while(index <= num):\n fact *= index\n index = index+1\n return fact\n\n\nradians = 3*3.14159/2\nnum = 15\nsin = 0\nindex = 1\nodd = 1\nwhile(index <= num):\n if(index%2 == 1):\n sin = sin + (exponent(radians,odd)/factorial(odd))\n if(index%2 == 0):\n sin = sin - (exponent(radians,odd)/factorial(odd))\n index += 1\n odd += 2\nprint(\"The value of sin for the given radians is :\",sin)\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class PluginBase(object):
"""
    Base class for all plugins
"""
__metaclass__ = PluginType
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PluginType(type):
<|reserved_special_token_0|>
class PluginBase(object):
"""
    Base class for all plugins
"""
__metaclass__ = PluginType
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PluginType(type):
def __init__(cls, name, bases, attrs):
super(PluginType, cls).__init__(name, bases, attrs)
if not cls in plugins_list:
plugins_list.append(cls)
class PluginBase(object):
"""
    Base class for all plugins
"""
__metaclass__ = PluginType
pass
<|reserved_special_token_1|>
plugins_list = []
class PluginType(type):
def __init__(cls, name, bases, attrs):
super(PluginType, cls).__init__(name, bases, attrs)
if not cls in plugins_list:
plugins_list.append(cls)
class PluginBase(object):
"""
    Base class for all plugins
"""
__metaclass__ = PluginType
pass
<|reserved_special_token_1|>
#!/usr/bin/python
# -*- coding: utf-8 -*-
plugins_list = []
class PluginType(type):
def __init__(cls, name, bases, attrs):
super(PluginType, cls).__init__(name, bases, attrs)
        # register the plugin in the list
if not cls in plugins_list:
plugins_list.append(cls)
class PluginBase(object):
'''
    Base class for all plugins
'''
__metaclass__ = PluginType
pass
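A minimal usage sketch of the registry pattern above, assuming Python 2 semantics (where __metaclass__ is honored); the subclass names are hypothetical:

# Defining a subclass is enough to register it: PluginType.__init__ runs at
# class-creation time and appends the new class to plugins_list.
class CsvExportPlugin(PluginBase):
    pass

class XmlExportPlugin(PluginBase):
    pass

# PluginBase itself was registered when it was defined, so the list now holds
# [PluginBase, CsvExportPlugin, XmlExportPlugin].
print(plugins_list)

# Under Python 3, __metaclass__ is ignored; the equivalent declaration would be
# class PluginBase(metaclass=PluginType): ...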
|
flexible
|
{
"blob_id": "b670655e3a8e88b97eed35e187b01d6524a16af3",
"index": 7709,
"step-1": "<mask token>\n\n\nclass PluginBase(object):\n \"\"\"\n Clase base para todos los plugins\n \"\"\"\n __metaclass__ = PluginType\n pass\n",
"step-2": "<mask token>\n\n\nclass PluginType(type):\n <mask token>\n\n\nclass PluginBase(object):\n \"\"\"\n Clase base para todos los plugins\n \"\"\"\n __metaclass__ = PluginType\n pass\n",
"step-3": "<mask token>\n\n\nclass PluginType(type):\n\n def __init__(cls, name, bases, attrs):\n super(PluginType, cls).__init__(name, bases, attrs)\n if not cls in plugins_list:\n plugins_list.append(cls)\n\n\nclass PluginBase(object):\n \"\"\"\n Clase base para todos los plugins\n \"\"\"\n __metaclass__ = PluginType\n pass\n",
"step-4": "plugins_list = []\n\n\nclass PluginType(type):\n\n def __init__(cls, name, bases, attrs):\n super(PluginType, cls).__init__(name, bases, attrs)\n if not cls in plugins_list:\n plugins_list.append(cls)\n\n\nclass PluginBase(object):\n \"\"\"\n Clase base para todos los plugins\n \"\"\"\n __metaclass__ = PluginType\n pass\n",
"step-5": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\nplugins_list = []\n\nclass PluginType(type):\n def __init__(cls, name, bases, attrs):\n super(PluginType, cls).__init__(name, bases, attrs)\n\n # registrar el plugin en la lista\n if not cls in plugins_list:\n plugins_list.append(cls)\n\n\nclass PluginBase(object):\n '''\n Clase base para todos los plugins\n '''\n\n __metaclass__ = PluginType\n\n pass\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# -*- encoding:ascii -*-
from mako import runtime, filters, cache
UNDEFINED = runtime.UNDEFINED
__M_dict_builtin = dict
__M_locals_builtin = locals
_magic_number = 6
_modified_time = 1383550959.0389481
_template_filename='templates/webapps/tool_shed/repository/browse_repository.mako'
_template_uri='/webapps/tool_shed/repository/browse_repository.mako'
_template_cache=cache.Cache(__name__, _modified_time)
_source_encoding='ascii'
_exports = ['stylesheets', 'javascripts']
# SOURCE LINE 7
def inherit(context):
if context.get('use_panels'):
return '/webapps/tool_shed/base_panels.mako'
else:
return '/base.mako'
def _mako_get_namespace(context, name):
try:
return context.namespaces[(__name__, name)]
except KeyError:
_mako_generate_namespaces(context)
return context.namespaces[(__name__, name)]
def _mako_generate_namespaces(context):
# SOURCE LINE 2
ns = runtime.TemplateNamespace('__anon_0x88e2e50', context._clean_inheritance_tokens(), templateuri=u'/message.mako', callables=None, calling_uri=_template_uri)
context.namespaces[(__name__, '__anon_0x88e2e50')] = ns
# SOURCE LINE 4
ns = runtime.TemplateNamespace('__anon_0x7ee9750', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/common/common.mako', callables=None, calling_uri=_template_uri)
context.namespaces[(__name__, '__anon_0x7ee9750')] = ns
# SOURCE LINE 5
ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/repository/common.mako', callables=None, calling_uri=_template_uri)
context.namespaces[(__name__, '__anon_0x8a2fd90')] = ns
# SOURCE LINE 3
ns = runtime.TemplateNamespace('__anon_0x88e21d0', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/common/repository_actions_menu.mako', callables=None, calling_uri=_template_uri)
context.namespaces[(__name__, '__anon_0x88e21d0')] = ns
def _mako_inherit(template, context):
_mako_generate_namespaces(context)
return runtime._inherit_from(context, (inherit(context)), _template_uri)
def render_body(context,**pageargs):
context.caller_stack._push_frame()
try:
__M_locals = __M_dict_builtin(pageargs=pageargs)
_import_ns = {}
_mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])
_mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])
status = _import_ns.get('status', context.get('status', UNDEFINED))
render_clone_str = _import_ns.get('render_clone_str', context.get('render_clone_str', UNDEFINED))
render_repository_type_select_field = _import_ns.get('render_repository_type_select_field', context.get('render_repository_type_select_field', UNDEFINED))
render_msg = _import_ns.get('render_msg', context.get('render_msg', UNDEFINED))
repository = _import_ns.get('repository', context.get('repository', UNDEFINED))
h = _import_ns.get('h', context.get('h', UNDEFINED))
render_tool_shed_repository_actions = _import_ns.get('render_tool_shed_repository_actions', context.get('render_tool_shed_repository_actions', UNDEFINED))
is_malicious = _import_ns.get('is_malicious', context.get('is_malicious', UNDEFINED))
repository_type_select_field = _import_ns.get('repository_type_select_field', context.get('repository_type_select_field', UNDEFINED))
commit_message = _import_ns.get('commit_message', context.get('commit_message', UNDEFINED))
message = _import_ns.get('message', context.get('message', UNDEFINED))
trans = _import_ns.get('trans', context.get('trans', UNDEFINED))
__M_writer = context.writer()
# SOURCE LINE 1
__M_writer(u'\n')
# SOURCE LINE 2
__M_writer(u'\n')
# SOURCE LINE 3
__M_writer(u'\n')
# SOURCE LINE 4
__M_writer(u'\n')
# SOURCE LINE 5
__M_writer(u'\n\n')
# SOURCE LINE 13
__M_writer(u'\n')
# SOURCE LINE 14
__M_writer(u'\n\n')
# SOURCE LINE 19
__M_writer(u'\n\n')
# SOURCE LINE 25
__M_writer(u'\n\n')
# SOURCE LINE 27
is_new = repository.is_new( trans.app )
can_push = trans.app.security_agent.can_push( trans.app, trans.user, repository )
can_download = not is_new and ( not is_malicious or can_push )
can_browse_contents = not is_new
__M_locals_builtin_stored = __M_locals_builtin()
__M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['can_push','can_browse_contents','is_new','can_download'] if __M_key in __M_locals_builtin_stored]))
# SOURCE LINE 32
__M_writer(u'\n\n')
# SOURCE LINE 34
__M_writer(unicode(render_tool_shed_repository_actions( repository )))
__M_writer(u'\n\n')
# SOURCE LINE 36
if message:
# SOURCE LINE 37
__M_writer(u' ')
__M_writer(unicode(render_msg( message, status )))
__M_writer(u'\n')
pass
# SOURCE LINE 39
__M_writer(u'\n')
# SOURCE LINE 40
if can_browse_contents:
# SOURCE LINE 41
__M_writer(u' <div class="toolForm">\n <div class="toolFormTitle">Repository \'')
# SOURCE LINE 42
__M_writer(filters.html_escape(unicode(repository.name )))
__M_writer(u"' revision ")
__M_writer(filters.html_escape(unicode(repository.tip( trans.app ) )))
__M_writer(u' (repository tip)</div>\n')
# SOURCE LINE 43
if can_download:
# SOURCE LINE 44
__M_writer(u' <div class="form-row">\n <label>Clone this repository:</label>\n ')
# SOURCE LINE 46
__M_writer(unicode(render_clone_str( repository )))
__M_writer(u'\n </div>\n')
pass
# SOURCE LINE 49
__M_writer(u' <form name="repository_type">\n ')
# SOURCE LINE 50
__M_writer(unicode(render_repository_type_select_field( repository_type_select_field, render_help=False )))
__M_writer(u'\n </form>\n')
# SOURCE LINE 52
if can_push:
# SOURCE LINE 53
__M_writer(u' <form name="select_files_to_delete" id="select_files_to_delete" action="')
__M_writer(unicode(h.url_for( controller='repository', action='select_files_to_delete', id=trans.security.encode_id( repository.id ))))
__M_writer(u'" method="post" >\n <div class="form-row" >\n <label>Contents:</label>\n <div id="tree" >\n Loading...\n </div>\n <div class="toolParamHelp" style="clear: both;">\n Click on a file to display it\'s contents below. You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\n </div>\n <input id="selected_files_to_delete" name="selected_files_to_delete" type="hidden" value=""/>\n </div>\n <div class="form-row">\n <label>Message:</label>\n <div class="form-row-input">\n')
# SOURCE LINE 67
if commit_message:
# SOURCE LINE 68
__M_writer(u' <textarea name="commit_message" rows="3" cols="35">')
__M_writer(filters.html_escape(unicode(commit_message )))
__M_writer(u'</textarea>\n')
# SOURCE LINE 69
else:
# SOURCE LINE 70
__M_writer(u' <textarea name="commit_message" rows="3" cols="35"></textarea>\n')
pass
# SOURCE LINE 72
__M_writer(u' </div>\n <div class="toolParamHelp" style="clear: both;">\n This is the commit message for the mercurial change set that will be created if you delete selected files.\n </div>\n <div style="clear: both"></div>\n </div>\n <div class="form-row">\n <input type="submit" name="select_files_to_delete_button" value="Delete selected files"/>\n </div>\n <div class="form-row">\n <div id="file_contents" class="toolParamHelp" style="clear: both;background-color:#FAFAFA;"></div>\n </div>\n </form>\n')
# SOURCE LINE 85
else:
# SOURCE LINE 86
__M_writer(u' <div class="toolFormBody">\n <div class="form-row" >\n <label>Contents:</label>\n <div id="tree" >\n Loading...\n </div>\n </div>\n <div class="form-row">\n <div id="file_contents" class="toolParamHelp" style="clear: both;background-color:#FAFAFA;"></div>\n </div>\n </div>\n')
pass
# SOURCE LINE 98
__M_writer(u' </div>\n <p/>\n')
pass
return ''
finally:
context.caller_stack._pop_frame()
def render_stylesheets(context):
context.caller_stack._push_frame()
try:
_import_ns = {}
_mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])
_mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])
h = _import_ns.get('h', context.get('h', UNDEFINED))
parent = _import_ns.get('parent', context.get('parent', UNDEFINED))
__M_writer = context.writer()
# SOURCE LINE 16
__M_writer(u'\n ')
# SOURCE LINE 17
__M_writer(unicode(parent.stylesheets()))
__M_writer(u'\n ')
# SOURCE LINE 18
__M_writer(unicode(h.css( "jquery.rating", "dynatree_skin/ui.dynatree" )))
__M_writer(u'\n')
return ''
finally:
context.caller_stack._pop_frame()
def render_javascripts(context):
context.caller_stack._push_frame()
try:
_import_ns = {}
_mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])
_mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])
common_javascripts = _import_ns.get('common_javascripts', context.get('common_javascripts', UNDEFINED))
h = _import_ns.get('h', context.get('h', UNDEFINED))
repository = _import_ns.get('repository', context.get('repository', UNDEFINED))
parent = _import_ns.get('parent', context.get('parent', UNDEFINED))
__M_writer = context.writer()
# SOURCE LINE 21
__M_writer(u'\n ')
# SOURCE LINE 22
__M_writer(unicode(parent.javascripts()))
__M_writer(u'\n ')
# SOURCE LINE 23
__M_writer(unicode(h.js( "libs/jquery/jquery.rating", "libs/jquery/jquery-ui", "libs/jquery/jquery.cookie", "libs/jquery/jquery.dynatree" )))
__M_writer(u'\n ')
# SOURCE LINE 24
__M_writer(unicode(common_javascripts(repository)))
__M_writer(u'\n')
return ''
finally:
context.caller_stack._pop_frame()
|
normal
|
{
"blob_id": "fd54bbfbc81aec371ad6c82bf402a5a3673a9f24",
"index": 8892,
"step-1": "<mask token>\n\n\ndef _mako_generate_namespaces(context):\n ns = runtime.TemplateNamespace('__anon_0x88e2e50', context.\n _clean_inheritance_tokens(), templateuri=u'/message.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e2e50'] = ns\n ns = runtime.TemplateNamespace('__anon_0x7ee9750', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x7ee9750'] = ns\n ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/repository/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x8a2fd90'] = ns\n ns = runtime.TemplateNamespace('__anon_0x88e21d0', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/repository_actions_menu.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e21d0'] = ns\n\n\ndef _mako_inherit(template, context):\n _mako_generate_namespaces(context)\n return runtime._inherit_from(context, inherit(context), _template_uri)\n\n\ndef render_body(context, **pageargs):\n context.caller_stack._push_frame()\n try:\n __M_locals = __M_dict_builtin(pageargs=pageargs)\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n status = _import_ns.get('status', context.get('status', UNDEFINED))\n render_clone_str = _import_ns.get('render_clone_str', context.get(\n 'render_clone_str', UNDEFINED))\n render_repository_type_select_field = _import_ns.get(\n 'render_repository_type_select_field', context.get(\n 'render_repository_type_select_field', UNDEFINED))\n render_msg = _import_ns.get('render_msg', context.get('render_msg',\n UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n render_tool_shed_repository_actions = _import_ns.get(\n 'render_tool_shed_repository_actions', context.get(\n 'render_tool_shed_repository_actions', UNDEFINED))\n is_malicious = _import_ns.get('is_malicious', context.get(\n 'is_malicious', UNDEFINED))\n repository_type_select_field = _import_ns.get(\n 'repository_type_select_field', context.get(\n 'repository_type_select_field', UNDEFINED))\n commit_message = _import_ns.get('commit_message', context.get(\n 'commit_message', UNDEFINED))\n message = _import_ns.get('message', context.get('message', UNDEFINED))\n trans = _import_ns.get('trans', context.get('trans', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n is_new = repository.is_new(trans.app)\n can_push = trans.app.security_agent.can_push(trans.app, trans.user,\n repository)\n can_download = not is_new and (not is_malicious or can_push)\n can_browse_contents = not is_new\n __M_locals_builtin_stored = __M_locals_builtin()\n __M_locals.update(__M_dict_builtin([(__M_key,\n __M_locals_builtin_stored[__M_key]) for __M_key in 
['can_push',\n 'can_browse_contents', 'is_new', 'can_download'] if __M_key in\n __M_locals_builtin_stored]))\n __M_writer(u'\\n\\n')\n __M_writer(unicode(render_tool_shed_repository_actions(repository)))\n __M_writer(u'\\n\\n')\n if message:\n __M_writer(u' ')\n __M_writer(unicode(render_msg(message, status)))\n __M_writer(u'\\n')\n pass\n __M_writer(u'\\n')\n if can_browse_contents:\n __M_writer(\n u\"\"\" <div class=\"toolForm\">\n <div class=\"toolFormTitle\">Repository '\"\"\"\n )\n __M_writer(filters.html_escape(unicode(repository.name)))\n __M_writer(u\"' revision \")\n __M_writer(filters.html_escape(unicode(repository.tip(trans.app))))\n __M_writer(u' (repository tip)</div>\\n')\n if can_download:\n __M_writer(\n u\"\"\" <div class=\"form-row\">\n <label>Clone this repository:</label>\n \"\"\"\n )\n __M_writer(unicode(render_clone_str(repository)))\n __M_writer(u'\\n </div>\\n')\n pass\n __M_writer(u' <form name=\"repository_type\">\\n ')\n __M_writer(unicode(render_repository_type_select_field(\n repository_type_select_field, render_help=False)))\n __M_writer(u'\\n </form>\\n')\n if can_push:\n __M_writer(\n u' <form name=\"select_files_to_delete\" id=\"select_files_to_delete\" action=\"'\n )\n __M_writer(unicode(h.url_for(controller='repository',\n action='select_files_to_delete', id=trans.security.\n encode_id(repository.id))))\n __M_writer(\n u\"\"\"\" method=\"post\" >\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n Click on a file to display it's contents below. You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\n </div>\n <input id=\"selected_files_to_delete\" name=\"selected_files_to_delete\" type=\"hidden\" value=\"\"/>\n </div>\n <div class=\"form-row\">\n <label>Message:</label>\n <div class=\"form-row-input\">\n\"\"\"\n )\n if commit_message:\n __M_writer(\n u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\">'\n )\n __M_writer(filters.html_escape(unicode(commit_message)))\n __M_writer(u'</textarea>\\n')\n else:\n __M_writer(\n u\"\"\" <textarea name=\"commit_message\" rows=\"3\" cols=\"35\"></textarea>\n\"\"\"\n )\n pass\n __M_writer(\n u\"\"\" </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n This is the commit message for the mercurial change set that will be created if you delete selected files.\n </div>\n <div style=\"clear: both\"></div>\n </div>\n <div class=\"form-row\">\n <input type=\"submit\" name=\"select_files_to_delete_button\" value=\"Delete selected files\"/>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </form>\n\"\"\"\n )\n else:\n __M_writer(\n u\"\"\" <div class=\"toolFormBody\">\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </div>\n\"\"\"\n )\n pass\n __M_writer(u' </div>\\n <p/>\\n')\n pass\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef inherit(context):\n if context.get('use_panels'):\n return '/webapps/tool_shed/base_panels.mako'\n else:\n return '/base.mako'\n\n\n<mask token>\n\n\ndef _mako_generate_namespaces(context):\n ns = runtime.TemplateNamespace('__anon_0x88e2e50', context.\n _clean_inheritance_tokens(), templateuri=u'/message.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e2e50'] = ns\n ns = runtime.TemplateNamespace('__anon_0x7ee9750', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x7ee9750'] = ns\n ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/repository/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x8a2fd90'] = ns\n ns = runtime.TemplateNamespace('__anon_0x88e21d0', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/repository_actions_menu.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e21d0'] = ns\n\n\ndef _mako_inherit(template, context):\n _mako_generate_namespaces(context)\n return runtime._inherit_from(context, inherit(context), _template_uri)\n\n\ndef render_body(context, **pageargs):\n context.caller_stack._push_frame()\n try:\n __M_locals = __M_dict_builtin(pageargs=pageargs)\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n status = _import_ns.get('status', context.get('status', UNDEFINED))\n render_clone_str = _import_ns.get('render_clone_str', context.get(\n 'render_clone_str', UNDEFINED))\n render_repository_type_select_field = _import_ns.get(\n 'render_repository_type_select_field', context.get(\n 'render_repository_type_select_field', UNDEFINED))\n render_msg = _import_ns.get('render_msg', context.get('render_msg',\n UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n render_tool_shed_repository_actions = _import_ns.get(\n 'render_tool_shed_repository_actions', context.get(\n 'render_tool_shed_repository_actions', UNDEFINED))\n is_malicious = _import_ns.get('is_malicious', context.get(\n 'is_malicious', UNDEFINED))\n repository_type_select_field = _import_ns.get(\n 'repository_type_select_field', context.get(\n 'repository_type_select_field', UNDEFINED))\n commit_message = _import_ns.get('commit_message', context.get(\n 'commit_message', UNDEFINED))\n message = _import_ns.get('message', context.get('message', UNDEFINED))\n trans = _import_ns.get('trans', context.get('trans', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n is_new = repository.is_new(trans.app)\n can_push = trans.app.security_agent.can_push(trans.app, trans.user,\n repository)\n can_download = not is_new and (not is_malicious or can_push)\n can_browse_contents = not is_new\n 
__M_locals_builtin_stored = __M_locals_builtin()\n __M_locals.update(__M_dict_builtin([(__M_key,\n __M_locals_builtin_stored[__M_key]) for __M_key in ['can_push',\n 'can_browse_contents', 'is_new', 'can_download'] if __M_key in\n __M_locals_builtin_stored]))\n __M_writer(u'\\n\\n')\n __M_writer(unicode(render_tool_shed_repository_actions(repository)))\n __M_writer(u'\\n\\n')\n if message:\n __M_writer(u' ')\n __M_writer(unicode(render_msg(message, status)))\n __M_writer(u'\\n')\n pass\n __M_writer(u'\\n')\n if can_browse_contents:\n __M_writer(\n u\"\"\" <div class=\"toolForm\">\n <div class=\"toolFormTitle\">Repository '\"\"\"\n )\n __M_writer(filters.html_escape(unicode(repository.name)))\n __M_writer(u\"' revision \")\n __M_writer(filters.html_escape(unicode(repository.tip(trans.app))))\n __M_writer(u' (repository tip)</div>\\n')\n if can_download:\n __M_writer(\n u\"\"\" <div class=\"form-row\">\n <label>Clone this repository:</label>\n \"\"\"\n )\n __M_writer(unicode(render_clone_str(repository)))\n __M_writer(u'\\n </div>\\n')\n pass\n __M_writer(u' <form name=\"repository_type\">\\n ')\n __M_writer(unicode(render_repository_type_select_field(\n repository_type_select_field, render_help=False)))\n __M_writer(u'\\n </form>\\n')\n if can_push:\n __M_writer(\n u' <form name=\"select_files_to_delete\" id=\"select_files_to_delete\" action=\"'\n )\n __M_writer(unicode(h.url_for(controller='repository',\n action='select_files_to_delete', id=trans.security.\n encode_id(repository.id))))\n __M_writer(\n u\"\"\"\" method=\"post\" >\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n Click on a file to display it's contents below. You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\n </div>\n <input id=\"selected_files_to_delete\" name=\"selected_files_to_delete\" type=\"hidden\" value=\"\"/>\n </div>\n <div class=\"form-row\">\n <label>Message:</label>\n <div class=\"form-row-input\">\n\"\"\"\n )\n if commit_message:\n __M_writer(\n u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\">'\n )\n __M_writer(filters.html_escape(unicode(commit_message)))\n __M_writer(u'</textarea>\\n')\n else:\n __M_writer(\n u\"\"\" <textarea name=\"commit_message\" rows=\"3\" cols=\"35\"></textarea>\n\"\"\"\n )\n pass\n __M_writer(\n u\"\"\" </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n This is the commit message for the mercurial change set that will be created if you delete selected files.\n </div>\n <div style=\"clear: both\"></div>\n </div>\n <div class=\"form-row\">\n <input type=\"submit\" name=\"select_files_to_delete_button\" value=\"Delete selected files\"/>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </form>\n\"\"\"\n )\n else:\n __M_writer(\n u\"\"\" <div class=\"toolFormBody\">\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </div>\n\"\"\"\n )\n pass\n __M_writer(u' </div>\\n <p/>\\n')\n pass\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\n<mask token>\n\n\ndef render_javascripts(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n 
_mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n common_javascripts = _import_ns.get('common_javascripts', context.\n get('common_javascripts', UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n ')\n __M_writer(unicode(parent.javascripts()))\n __M_writer(u'\\n ')\n __M_writer(unicode(h.js('libs/jquery/jquery.rating',\n 'libs/jquery/jquery-ui', 'libs/jquery/jquery.cookie',\n 'libs/jquery/jquery.dynatree')))\n __M_writer(u'\\n ')\n __M_writer(unicode(common_javascripts(repository)))\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n",
"step-3": "<mask token>\n\n\ndef inherit(context):\n if context.get('use_panels'):\n return '/webapps/tool_shed/base_panels.mako'\n else:\n return '/base.mako'\n\n\ndef _mako_get_namespace(context, name):\n try:\n return context.namespaces[__name__, name]\n except KeyError:\n _mako_generate_namespaces(context)\n return context.namespaces[__name__, name]\n\n\ndef _mako_generate_namespaces(context):\n ns = runtime.TemplateNamespace('__anon_0x88e2e50', context.\n _clean_inheritance_tokens(), templateuri=u'/message.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e2e50'] = ns\n ns = runtime.TemplateNamespace('__anon_0x7ee9750', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x7ee9750'] = ns\n ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/repository/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x8a2fd90'] = ns\n ns = runtime.TemplateNamespace('__anon_0x88e21d0', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/repository_actions_menu.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e21d0'] = ns\n\n\ndef _mako_inherit(template, context):\n _mako_generate_namespaces(context)\n return runtime._inherit_from(context, inherit(context), _template_uri)\n\n\ndef render_body(context, **pageargs):\n context.caller_stack._push_frame()\n try:\n __M_locals = __M_dict_builtin(pageargs=pageargs)\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n status = _import_ns.get('status', context.get('status', UNDEFINED))\n render_clone_str = _import_ns.get('render_clone_str', context.get(\n 'render_clone_str', UNDEFINED))\n render_repository_type_select_field = _import_ns.get(\n 'render_repository_type_select_field', context.get(\n 'render_repository_type_select_field', UNDEFINED))\n render_msg = _import_ns.get('render_msg', context.get('render_msg',\n UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n render_tool_shed_repository_actions = _import_ns.get(\n 'render_tool_shed_repository_actions', context.get(\n 'render_tool_shed_repository_actions', UNDEFINED))\n is_malicious = _import_ns.get('is_malicious', context.get(\n 'is_malicious', UNDEFINED))\n repository_type_select_field = _import_ns.get(\n 'repository_type_select_field', context.get(\n 'repository_type_select_field', UNDEFINED))\n commit_message = _import_ns.get('commit_message', context.get(\n 'commit_message', UNDEFINED))\n message = _import_ns.get('message', context.get('message', UNDEFINED))\n trans = _import_ns.get('trans', context.get('trans', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n is_new = repository.is_new(trans.app)\n 
can_push = trans.app.security_agent.can_push(trans.app, trans.user,\n repository)\n can_download = not is_new and (not is_malicious or can_push)\n can_browse_contents = not is_new\n __M_locals_builtin_stored = __M_locals_builtin()\n __M_locals.update(__M_dict_builtin([(__M_key,\n __M_locals_builtin_stored[__M_key]) for __M_key in ['can_push',\n 'can_browse_contents', 'is_new', 'can_download'] if __M_key in\n __M_locals_builtin_stored]))\n __M_writer(u'\\n\\n')\n __M_writer(unicode(render_tool_shed_repository_actions(repository)))\n __M_writer(u'\\n\\n')\n if message:\n __M_writer(u' ')\n __M_writer(unicode(render_msg(message, status)))\n __M_writer(u'\\n')\n pass\n __M_writer(u'\\n')\n if can_browse_contents:\n __M_writer(\n u\"\"\" <div class=\"toolForm\">\n <div class=\"toolFormTitle\">Repository '\"\"\"\n )\n __M_writer(filters.html_escape(unicode(repository.name)))\n __M_writer(u\"' revision \")\n __M_writer(filters.html_escape(unicode(repository.tip(trans.app))))\n __M_writer(u' (repository tip)</div>\\n')\n if can_download:\n __M_writer(\n u\"\"\" <div class=\"form-row\">\n <label>Clone this repository:</label>\n \"\"\"\n )\n __M_writer(unicode(render_clone_str(repository)))\n __M_writer(u'\\n </div>\\n')\n pass\n __M_writer(u' <form name=\"repository_type\">\\n ')\n __M_writer(unicode(render_repository_type_select_field(\n repository_type_select_field, render_help=False)))\n __M_writer(u'\\n </form>\\n')\n if can_push:\n __M_writer(\n u' <form name=\"select_files_to_delete\" id=\"select_files_to_delete\" action=\"'\n )\n __M_writer(unicode(h.url_for(controller='repository',\n action='select_files_to_delete', id=trans.security.\n encode_id(repository.id))))\n __M_writer(\n u\"\"\"\" method=\"post\" >\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n Click on a file to display it's contents below. 
You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\n </div>\n <input id=\"selected_files_to_delete\" name=\"selected_files_to_delete\" type=\"hidden\" value=\"\"/>\n </div>\n <div class=\"form-row\">\n <label>Message:</label>\n <div class=\"form-row-input\">\n\"\"\"\n )\n if commit_message:\n __M_writer(\n u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\">'\n )\n __M_writer(filters.html_escape(unicode(commit_message)))\n __M_writer(u'</textarea>\\n')\n else:\n __M_writer(\n u\"\"\" <textarea name=\"commit_message\" rows=\"3\" cols=\"35\"></textarea>\n\"\"\"\n )\n pass\n __M_writer(\n u\"\"\" </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n This is the commit message for the mercurial change set that will be created if you delete selected files.\n </div>\n <div style=\"clear: both\"></div>\n </div>\n <div class=\"form-row\">\n <input type=\"submit\" name=\"select_files_to_delete_button\" value=\"Delete selected files\"/>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </form>\n\"\"\"\n )\n else:\n __M_writer(\n u\"\"\" <div class=\"toolFormBody\">\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </div>\n\"\"\"\n )\n pass\n __M_writer(u' </div>\\n <p/>\\n')\n pass\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_stylesheets(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n ')\n __M_writer(unicode(parent.stylesheets()))\n __M_writer(u'\\n ')\n __M_writer(unicode(h.css('jquery.rating', 'dynatree_skin/ui.dynatree'))\n )\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_javascripts(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n common_javascripts = _import_ns.get('common_javascripts', context.\n get('common_javascripts', UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n ')\n __M_writer(unicode(parent.javascripts()))\n __M_writer(u'\\n ')\n __M_writer(unicode(h.js('libs/jquery/jquery.rating',\n 'libs/jquery/jquery-ui', 
'libs/jquery/jquery.cookie',\n 'libs/jquery/jquery.dynatree')))\n __M_writer(u'\\n ')\n __M_writer(unicode(common_javascripts(repository)))\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n",
"step-4": "from mako import runtime, filters, cache\nUNDEFINED = runtime.UNDEFINED\n__M_dict_builtin = dict\n__M_locals_builtin = locals\n_magic_number = 6\n_modified_time = 1383550959.038948\n_template_filename = (\n 'templates/webapps/tool_shed/repository/browse_repository.mako')\n_template_uri = '/webapps/tool_shed/repository/browse_repository.mako'\n_template_cache = cache.Cache(__name__, _modified_time)\n_source_encoding = 'ascii'\n_exports = ['stylesheets', 'javascripts']\n\n\ndef inherit(context):\n if context.get('use_panels'):\n return '/webapps/tool_shed/base_panels.mako'\n else:\n return '/base.mako'\n\n\ndef _mako_get_namespace(context, name):\n try:\n return context.namespaces[__name__, name]\n except KeyError:\n _mako_generate_namespaces(context)\n return context.namespaces[__name__, name]\n\n\ndef _mako_generate_namespaces(context):\n ns = runtime.TemplateNamespace('__anon_0x88e2e50', context.\n _clean_inheritance_tokens(), templateuri=u'/message.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e2e50'] = ns\n ns = runtime.TemplateNamespace('__anon_0x7ee9750', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x7ee9750'] = ns\n ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/repository/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x8a2fd90'] = ns\n ns = runtime.TemplateNamespace('__anon_0x88e21d0', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/repository_actions_menu.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e21d0'] = ns\n\n\ndef _mako_inherit(template, context):\n _mako_generate_namespaces(context)\n return runtime._inherit_from(context, inherit(context), _template_uri)\n\n\ndef render_body(context, **pageargs):\n context.caller_stack._push_frame()\n try:\n __M_locals = __M_dict_builtin(pageargs=pageargs)\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n status = _import_ns.get('status', context.get('status', UNDEFINED))\n render_clone_str = _import_ns.get('render_clone_str', context.get(\n 'render_clone_str', UNDEFINED))\n render_repository_type_select_field = _import_ns.get(\n 'render_repository_type_select_field', context.get(\n 'render_repository_type_select_field', UNDEFINED))\n render_msg = _import_ns.get('render_msg', context.get('render_msg',\n UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n render_tool_shed_repository_actions = _import_ns.get(\n 'render_tool_shed_repository_actions', context.get(\n 'render_tool_shed_repository_actions', UNDEFINED))\n is_malicious = _import_ns.get('is_malicious', context.get(\n 'is_malicious', UNDEFINED))\n repository_type_select_field = _import_ns.get(\n 'repository_type_select_field', context.get(\n 'repository_type_select_field', UNDEFINED))\n commit_message = _import_ns.get('commit_message', 
context.get(\n 'commit_message', UNDEFINED))\n message = _import_ns.get('message', context.get('message', UNDEFINED))\n trans = _import_ns.get('trans', context.get('trans', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n is_new = repository.is_new(trans.app)\n can_push = trans.app.security_agent.can_push(trans.app, trans.user,\n repository)\n can_download = not is_new and (not is_malicious or can_push)\n can_browse_contents = not is_new\n __M_locals_builtin_stored = __M_locals_builtin()\n __M_locals.update(__M_dict_builtin([(__M_key,\n __M_locals_builtin_stored[__M_key]) for __M_key in ['can_push',\n 'can_browse_contents', 'is_new', 'can_download'] if __M_key in\n __M_locals_builtin_stored]))\n __M_writer(u'\\n\\n')\n __M_writer(unicode(render_tool_shed_repository_actions(repository)))\n __M_writer(u'\\n\\n')\n if message:\n __M_writer(u' ')\n __M_writer(unicode(render_msg(message, status)))\n __M_writer(u'\\n')\n pass\n __M_writer(u'\\n')\n if can_browse_contents:\n __M_writer(\n u\"\"\" <div class=\"toolForm\">\n <div class=\"toolFormTitle\">Repository '\"\"\"\n )\n __M_writer(filters.html_escape(unicode(repository.name)))\n __M_writer(u\"' revision \")\n __M_writer(filters.html_escape(unicode(repository.tip(trans.app))))\n __M_writer(u' (repository tip)</div>\\n')\n if can_download:\n __M_writer(\n u\"\"\" <div class=\"form-row\">\n <label>Clone this repository:</label>\n \"\"\"\n )\n __M_writer(unicode(render_clone_str(repository)))\n __M_writer(u'\\n </div>\\n')\n pass\n __M_writer(u' <form name=\"repository_type\">\\n ')\n __M_writer(unicode(render_repository_type_select_field(\n repository_type_select_field, render_help=False)))\n __M_writer(u'\\n </form>\\n')\n if can_push:\n __M_writer(\n u' <form name=\"select_files_to_delete\" id=\"select_files_to_delete\" action=\"'\n )\n __M_writer(unicode(h.url_for(controller='repository',\n action='select_files_to_delete', id=trans.security.\n encode_id(repository.id))))\n __M_writer(\n u\"\"\"\" method=\"post\" >\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n Click on a file to display it's contents below. 
You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\n </div>\n <input id=\"selected_files_to_delete\" name=\"selected_files_to_delete\" type=\"hidden\" value=\"\"/>\n </div>\n <div class=\"form-row\">\n <label>Message:</label>\n <div class=\"form-row-input\">\n\"\"\"\n )\n if commit_message:\n __M_writer(\n u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\">'\n )\n __M_writer(filters.html_escape(unicode(commit_message)))\n __M_writer(u'</textarea>\\n')\n else:\n __M_writer(\n u\"\"\" <textarea name=\"commit_message\" rows=\"3\" cols=\"35\"></textarea>\n\"\"\"\n )\n pass\n __M_writer(\n u\"\"\" </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n This is the commit message for the mercurial change set that will be created if you delete selected files.\n </div>\n <div style=\"clear: both\"></div>\n </div>\n <div class=\"form-row\">\n <input type=\"submit\" name=\"select_files_to_delete_button\" value=\"Delete selected files\"/>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </form>\n\"\"\"\n )\n else:\n __M_writer(\n u\"\"\" <div class=\"toolFormBody\">\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </div>\n\"\"\"\n )\n pass\n __M_writer(u' </div>\\n <p/>\\n')\n pass\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_stylesheets(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n ')\n __M_writer(unicode(parent.stylesheets()))\n __M_writer(u'\\n ')\n __M_writer(unicode(h.css('jquery.rating', 'dynatree_skin/ui.dynatree'))\n )\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_javascripts(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n common_javascripts = _import_ns.get('common_javascripts', context.\n get('common_javascripts', UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n ')\n __M_writer(unicode(parent.javascripts()))\n __M_writer(u'\\n ')\n __M_writer(unicode(h.js('libs/jquery/jquery.rating',\n 'libs/jquery/jquery-ui', 
'libs/jquery/jquery.cookie',\n 'libs/jquery/jquery.dynatree')))\n __M_writer(u'\\n ')\n __M_writer(unicode(common_javascripts(repository)))\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n",
"step-5": "# -*- encoding:ascii -*-\nfrom mako import runtime, filters, cache\nUNDEFINED = runtime.UNDEFINED\n__M_dict_builtin = dict\n__M_locals_builtin = locals\n_magic_number = 6\n_modified_time = 1383550959.0389481\n_template_filename='templates/webapps/tool_shed/repository/browse_repository.mako'\n_template_uri='/webapps/tool_shed/repository/browse_repository.mako'\n_template_cache=cache.Cache(__name__, _modified_time)\n_source_encoding='ascii'\n_exports = ['stylesheets', 'javascripts']\n\n\n# SOURCE LINE 7\n\ndef inherit(context):\n if context.get('use_panels'):\n return '/webapps/tool_shed/base_panels.mako'\n else:\n return '/base.mako'\n\n\ndef _mako_get_namespace(context, name):\n try:\n return context.namespaces[(__name__, name)]\n except KeyError:\n _mako_generate_namespaces(context)\n return context.namespaces[(__name__, name)]\ndef _mako_generate_namespaces(context):\n # SOURCE LINE 2\n ns = runtime.TemplateNamespace('__anon_0x88e2e50', context._clean_inheritance_tokens(), templateuri=u'/message.mako', callables=None, calling_uri=_template_uri)\n context.namespaces[(__name__, '__anon_0x88e2e50')] = ns\n\n # SOURCE LINE 4\n ns = runtime.TemplateNamespace('__anon_0x7ee9750', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/common/common.mako', callables=None, calling_uri=_template_uri)\n context.namespaces[(__name__, '__anon_0x7ee9750')] = ns\n\n # SOURCE LINE 5\n ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/repository/common.mako', callables=None, calling_uri=_template_uri)\n context.namespaces[(__name__, '__anon_0x8a2fd90')] = ns\n\n # SOURCE LINE 3\n ns = runtime.TemplateNamespace('__anon_0x88e21d0', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/common/repository_actions_menu.mako', callables=None, calling_uri=_template_uri)\n context.namespaces[(__name__, '__anon_0x88e21d0')] = ns\n\ndef _mako_inherit(template, context):\n _mako_generate_namespaces(context)\n return runtime._inherit_from(context, (inherit(context)), _template_uri)\ndef render_body(context,**pageargs):\n context.caller_stack._push_frame()\n try:\n __M_locals = __M_dict_builtin(pageargs=pageargs)\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])\n status = _import_ns.get('status', context.get('status', UNDEFINED))\n render_clone_str = _import_ns.get('render_clone_str', context.get('render_clone_str', UNDEFINED))\n render_repository_type_select_field = _import_ns.get('render_repository_type_select_field', context.get('render_repository_type_select_field', UNDEFINED))\n render_msg = _import_ns.get('render_msg', context.get('render_msg', UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository', UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n render_tool_shed_repository_actions = _import_ns.get('render_tool_shed_repository_actions', context.get('render_tool_shed_repository_actions', UNDEFINED))\n is_malicious = _import_ns.get('is_malicious', context.get('is_malicious', UNDEFINED))\n repository_type_select_field = _import_ns.get('repository_type_select_field', context.get('repository_type_select_field', UNDEFINED))\n 
commit_message = _import_ns.get('commit_message', context.get('commit_message', UNDEFINED))\n message = _import_ns.get('message', context.get('message', UNDEFINED))\n trans = _import_ns.get('trans', context.get('trans', UNDEFINED))\n __M_writer = context.writer()\n # SOURCE LINE 1\n __M_writer(u'\\n')\n # SOURCE LINE 2\n __M_writer(u'\\n')\n # SOURCE LINE 3\n __M_writer(u'\\n')\n # SOURCE LINE 4\n __M_writer(u'\\n')\n # SOURCE LINE 5\n __M_writer(u'\\n\\n')\n # SOURCE LINE 13\n __M_writer(u'\\n')\n # SOURCE LINE 14\n __M_writer(u'\\n\\n')\n # SOURCE LINE 19\n __M_writer(u'\\n\\n')\n # SOURCE LINE 25\n __M_writer(u'\\n\\n')\n # SOURCE LINE 27\n\n is_new = repository.is_new( trans.app )\n can_push = trans.app.security_agent.can_push( trans.app, trans.user, repository )\n can_download = not is_new and ( not is_malicious or can_push )\n can_browse_contents = not is_new\n \n \n __M_locals_builtin_stored = __M_locals_builtin()\n __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['can_push','can_browse_contents','is_new','can_download'] if __M_key in __M_locals_builtin_stored]))\n # SOURCE LINE 32\n __M_writer(u'\\n\\n')\n # SOURCE LINE 34\n __M_writer(unicode(render_tool_shed_repository_actions( repository )))\n __M_writer(u'\\n\\n')\n # SOURCE LINE 36\n if message:\n # SOURCE LINE 37\n __M_writer(u' ')\n __M_writer(unicode(render_msg( message, status )))\n __M_writer(u'\\n')\n pass\n # SOURCE LINE 39\n __M_writer(u'\\n')\n # SOURCE LINE 40\n if can_browse_contents:\n # SOURCE LINE 41\n __M_writer(u' <div class=\"toolForm\">\\n <div class=\"toolFormTitle\">Repository \\'')\n # SOURCE LINE 42\n __M_writer(filters.html_escape(unicode(repository.name )))\n __M_writer(u\"' revision \")\n __M_writer(filters.html_escape(unicode(repository.tip( trans.app ) )))\n __M_writer(u' (repository tip)</div>\\n')\n # SOURCE LINE 43\n if can_download:\n # SOURCE LINE 44\n __M_writer(u' <div class=\"form-row\">\\n <label>Clone this repository:</label>\\n ')\n # SOURCE LINE 46\n __M_writer(unicode(render_clone_str( repository )))\n __M_writer(u'\\n </div>\\n')\n pass\n # SOURCE LINE 49\n __M_writer(u' <form name=\"repository_type\">\\n ')\n # SOURCE LINE 50\n __M_writer(unicode(render_repository_type_select_field( repository_type_select_field, render_help=False )))\n __M_writer(u'\\n </form>\\n')\n # SOURCE LINE 52\n if can_push:\n # SOURCE LINE 53\n __M_writer(u' <form name=\"select_files_to_delete\" id=\"select_files_to_delete\" action=\"')\n __M_writer(unicode(h.url_for( controller='repository', action='select_files_to_delete', id=trans.security.encode_id( repository.id ))))\n __M_writer(u'\" method=\"post\" >\\n <div class=\"form-row\" >\\n <label>Contents:</label>\\n <div id=\"tree\" >\\n Loading...\\n </div>\\n <div class=\"toolParamHelp\" style=\"clear: both;\">\\n Click on a file to display it\\'s contents below. 
You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\\n </div>\\n <input id=\"selected_files_to_delete\" name=\"selected_files_to_delete\" type=\"hidden\" value=\"\"/>\\n </div>\\n <div class=\"form-row\">\\n <label>Message:</label>\\n <div class=\"form-row-input\">\\n')\n # SOURCE LINE 67\n if commit_message:\n # SOURCE LINE 68\n __M_writer(u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\">')\n __M_writer(filters.html_escape(unicode(commit_message )))\n __M_writer(u'</textarea>\\n')\n # SOURCE LINE 69\n else:\n # SOURCE LINE 70\n __M_writer(u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\"></textarea>\\n')\n pass\n # SOURCE LINE 72\n __M_writer(u' </div>\\n <div class=\"toolParamHelp\" style=\"clear: both;\">\\n This is the commit message for the mercurial change set that will be created if you delete selected files.\\n </div>\\n <div style=\"clear: both\"></div>\\n </div>\\n <div class=\"form-row\">\\n <input type=\"submit\" name=\"select_files_to_delete_button\" value=\"Delete selected files\"/>\\n </div>\\n <div class=\"form-row\">\\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\\n </div>\\n </form>\\n')\n # SOURCE LINE 85\n else:\n # SOURCE LINE 86\n __M_writer(u' <div class=\"toolFormBody\">\\n <div class=\"form-row\" >\\n <label>Contents:</label>\\n <div id=\"tree\" >\\n Loading...\\n </div>\\n </div>\\n <div class=\"form-row\">\\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\\n </div>\\n </div>\\n')\n pass\n # SOURCE LINE 98\n __M_writer(u' </div>\\n <p/>\\n')\n pass\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_stylesheets(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n # SOURCE LINE 16\n __M_writer(u'\\n ')\n # SOURCE LINE 17\n __M_writer(unicode(parent.stylesheets()))\n __M_writer(u'\\n ')\n # SOURCE LINE 18\n __M_writer(unicode(h.css( \"jquery.rating\", \"dynatree_skin/ui.dynatree\" )))\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_javascripts(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])\n common_javascripts = _import_ns.get('common_javascripts', context.get('common_javascripts', UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository', UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n # SOURCE LINE 21\n 
__M_writer(u'\\n ')\n # SOURCE LINE 22\n __M_writer(unicode(parent.javascripts()))\n __M_writer(u'\\n ')\n # SOURCE LINE 23\n __M_writer(unicode(h.js( \"libs/jquery/jquery.rating\", \"libs/jquery/jquery-ui\", \"libs/jquery/jquery.cookie\", \"libs/jquery/jquery.dynatree\" )))\n __M_writer(u'\\n ')\n # SOURCE LINE 24\n __M_writer(unicode(common_javascripts(repository)))\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\n",
"step-ids": [
3,
5,
7,
9,
10
]
}
|
[
3,
5,
7,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def favorite_book(name):
print(f'One of my favorite books is {name}...')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def favorite_book(name):
print(f'One of my favorite books is {name}...')
favorite_book('Alice in Wonderland')
<|reserved_special_token_1|>
def favorite_book(name):
print(f"One of my favorite books is {name}...")
favorite_book("Alice in Wonderland")
|
flexible
|
{
"blob_id": "08848e51d5564bad927607be3fa3c86f2c1212c5",
"index": 9668,
"step-1": "<mask token>\n",
"step-2": "def favorite_book(name):\n print(f'One of my favorite books is {name}...')\n\n\n<mask token>\n",
"step-3": "def favorite_book(name):\n print(f'One of my favorite books is {name}...')\n\n\nfavorite_book('Alice in Wonderland')\n",
"step-4": "def favorite_book(name):\n \n print(f\"One of my favorite books is {name}...\")\n\nfavorite_book(\"Alice in Wonderland\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class HostApi(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def getHostById(self, **kwargs):
"""Retrieves host based on id
Args:
id, str: Host Id (required)
scope, str: Authorization Scope for RBAC (required)
Returns: HostResult
"""
allParams = ['id', 'scope']
params = locals()
for key, val in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError(
"Got an unexpected keyword argument '%s' to method getHostById"
% key)
params[key] = val
del params['kwargs']
resourcePath = '/host/{id}'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if 'scope' in params:
headerParams['scope'] = params['scope']
if 'id' in params:
replacement = str(self.apiClient.toPathValue(params['id']))
replacement = urllib.parse.quote(replacement)
resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)
postData = formParams if formParams else bodyParam
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'HostResult')
return responseObject
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class HostApi(object):
def __init__(self, apiClient):
self.apiClient = apiClient
def getHosts(self, **kwargs):
"""Retrieve hosts
Args:
limit, str: limit (required)
offset, str: offset (required)
sortBy, str: sortBy (required)
order, str: order (required)
hostName, list[str]: hostName (required)
hostMac, list[str]: hostMac (required)
hostType, list[str]: hostType (required)
connectedInterfaceName, list[str]: connectedInterfaceName (required)
hostIp, list[str]: hostIp (required)
connectedDeviceIp, list[str]: connectedDeviceIp (required)
scope, str: Authorization Scope for RBAC (required)
Returns: HostListResult
"""
allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',
'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',
'connectedDeviceIp', 'scope']
params = locals()
for key, val in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError(
"Got an unexpected keyword argument '%s' to method getHosts"
% key)
params[key] = val
del params['kwargs']
resourcePath = '/host'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if 'limit' in params:
queryParams['limit'] = self.apiClient.toPathValue(params['limit'])
if 'offset' in params:
queryParams['offset'] = self.apiClient.toPathValue(params['offset']
)
if 'sortBy' in params:
queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']
)
if 'order' in params:
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if 'hostName' in params:
queryParams['hostName'] = self.apiClient.toPathValue(params[
'hostName'])
if 'hostMac' in params:
queryParams['hostMac'] = self.apiClient.toPathValue(params[
'hostMac'])
if 'hostType' in params:
queryParams['hostType'] = self.apiClient.toPathValue(params[
'hostType'])
if 'connectedInterfaceName' in params:
queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(
params['connectedInterfaceName'])
if 'hostIp' in params:
queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']
)
if 'connectedDeviceIp' in params:
queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(
params['connectedDeviceIp'])
if 'scope' in params:
headerParams['scope'] = params['scope']
postData = formParams if formParams else bodyParam
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'HostListResult')
return responseObject
<|reserved_special_token_0|>
def getHostById(self, **kwargs):
"""Retrieves host based on id
Args:
id, str: Host Id (required)
scope, str: Authorization Scope for RBAC (required)
Returns: HostResult
"""
allParams = ['id', 'scope']
params = locals()
for key, val in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError(
"Got an unexpected keyword argument '%s' to method getHostById"
% key)
params[key] = val
del params['kwargs']
resourcePath = '/host/{id}'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if 'scope' in params:
headerParams['scope'] = params['scope']
if 'id' in params:
replacement = str(self.apiClient.toPathValue(params['id']))
replacement = urllib.parse.quote(replacement)
resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)
postData = formParams if formParams else bodyParam
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'HostResult')
return responseObject
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class HostApi(object):
def __init__(self, apiClient):
self.apiClient = apiClient
def getHosts(self, **kwargs):
"""Retrieve hosts
Args:
limit, str: limit (required)
offset, str: offset (required)
sortBy, str: sortBy (required)
order, str: order (required)
hostName, list[str]: hostName (required)
hostMac, list[str]: hostMac (required)
hostType, list[str]: hostType (required)
connectedInterfaceName, list[str]: connectedInterfaceName (required)
hostIp, list[str]: hostIp (required)
connectedDeviceIp, list[str]: connectedDeviceIp (required)
scope, str: Authorization Scope for RBAC (required)
Returns: HostListResult
"""
allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',
'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',
'connectedDeviceIp', 'scope']
params = locals()
for key, val in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError(
"Got an unexpected keyword argument '%s' to method getHosts"
% key)
params[key] = val
del params['kwargs']
resourcePath = '/host'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if 'limit' in params:
queryParams['limit'] = self.apiClient.toPathValue(params['limit'])
if 'offset' in params:
queryParams['offset'] = self.apiClient.toPathValue(params['offset']
)
if 'sortBy' in params:
queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']
)
if 'order' in params:
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if 'hostName' in params:
queryParams['hostName'] = self.apiClient.toPathValue(params[
'hostName'])
if 'hostMac' in params:
queryParams['hostMac'] = self.apiClient.toPathValue(params[
'hostMac'])
if 'hostType' in params:
queryParams['hostType'] = self.apiClient.toPathValue(params[
'hostType'])
if 'connectedInterfaceName' in params:
queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(
params['connectedInterfaceName'])
if 'hostIp' in params:
queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']
)
if 'connectedDeviceIp' in params:
queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(
params['connectedDeviceIp'])
if 'scope' in params:
headerParams['scope'] = params['scope']
postData = formParams if formParams else bodyParam
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'HostListResult')
return responseObject
def getHostCount(self, **kwargs):
"""Gives total number of hosts
Args:
scope, str: Authorization Scope for RBAC (required)
Returns: CountResult
"""
allParams = ['scope']
params = locals()
for key, val in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError(
"Got an unexpected keyword argument '%s' to method getHostCount"
% key)
params[key] = val
del params['kwargs']
resourcePath = '/host/count'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if 'scope' in params:
headerParams['scope'] = params['scope']
postData = formParams if formParams else bodyParam
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'CountResult')
return responseObject
def getHostById(self, **kwargs):
"""Retrieves host based on id
Args:
id, str: Host Id (required)
scope, str: Authorization Scope for RBAC (required)
Returns: HostResult
"""
allParams = ['id', 'scope']
params = locals()
for key, val in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError(
"Got an unexpected keyword argument '%s' to method getHostById"
% key)
params[key] = val
del params['kwargs']
resourcePath = '/host/{id}'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if 'scope' in params:
headerParams['scope'] = params['scope']
if 'id' in params:
replacement = str(self.apiClient.toPathValue(params['id']))
replacement = urllib.parse.quote(replacement)
resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)
postData = formParams if formParams else bodyParam
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'HostResult')
return responseObject
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import sys
import os
import urllib.request, urllib.parse, urllib.error
from .models import *
class HostApi(object):
def __init__(self, apiClient):
self.apiClient = apiClient
def getHosts(self, **kwargs):
"""Retrieve hosts
Args:
limit, str: limit (required)
offset, str: offset (required)
sortBy, str: sortBy (required)
order, str: order (required)
hostName, list[str]: hostName (required)
hostMac, list[str]: hostMac (required)
hostType, list[str]: hostType (required)
connectedInterfaceName, list[str]: connectedInterfaceName (required)
hostIp, list[str]: hostIp (required)
connectedDeviceIp, list[str]: connectedDeviceIp (required)
scope, str: Authorization Scope for RBAC (required)
Returns: HostListResult
"""
allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',
'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',
'connectedDeviceIp', 'scope']
params = locals()
for key, val in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError(
"Got an unexpected keyword argument '%s' to method getHosts"
% key)
params[key] = val
del params['kwargs']
resourcePath = '/host'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if 'limit' in params:
queryParams['limit'] = self.apiClient.toPathValue(params['limit'])
if 'offset' in params:
queryParams['offset'] = self.apiClient.toPathValue(params['offset']
)
if 'sortBy' in params:
queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']
)
if 'order' in params:
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if 'hostName' in params:
queryParams['hostName'] = self.apiClient.toPathValue(params[
'hostName'])
if 'hostMac' in params:
queryParams['hostMac'] = self.apiClient.toPathValue(params[
'hostMac'])
if 'hostType' in params:
queryParams['hostType'] = self.apiClient.toPathValue(params[
'hostType'])
if 'connectedInterfaceName' in params:
queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(
params['connectedInterfaceName'])
if 'hostIp' in params:
queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']
)
if 'connectedDeviceIp' in params:
queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(
params['connectedDeviceIp'])
if 'scope' in params:
headerParams['scope'] = params['scope']
postData = formParams if formParams else bodyParam
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'HostListResult')
return responseObject
def getHostCount(self, **kwargs):
"""Gives total number of hosts
Args:
scope, str: Authorization Scope for RBAC (required)
Returns: CountResult
"""
allParams = ['scope']
params = locals()
for key, val in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError(
"Got an unexpected keyword argument '%s' to method getHostCount"
% key)
params[key] = val
del params['kwargs']
resourcePath = '/host/count'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if 'scope' in params:
headerParams['scope'] = params['scope']
postData = formParams if formParams else bodyParam
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'CountResult')
return responseObject
def getHostById(self, **kwargs):
"""Retrieves host based on id
Args:
id, str: Host Id (required)
scope, str: Authorization Scope for RBAC (required)
Returns: HostResult
"""
allParams = ['id', 'scope']
params = locals()
for key, val in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError(
"Got an unexpected keyword argument '%s' to method getHostById"
% key)
params[key] = val
del params['kwargs']
resourcePath = '/host/{id}'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if 'scope' in params:
headerParams['scope'] = params['scope']
if 'id' in params:
replacement = str(self.apiClient.toPathValue(params['id']))
replacement = urllib.parse.quote(replacement)
resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)
postData = formParams if formParams else bodyParam
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'HostResult')
return responseObject
<|reserved_special_token_1|>
#!/usr/bin/env python
#pylint: skip-file
"""
HostApi.py
Copyright 2016 Cisco Systems
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
import urllib.request, urllib.parse, urllib.error
from .models import *
class HostApi(object):
def __init__(self, apiClient):
self.apiClient = apiClient
def getHosts(self, **kwargs):
"""Retrieve hosts
Args:
limit, str: limit (required)
offset, str: offset (required)
sortBy, str: sortBy (required)
order, str: order (required)
hostName, list[str]: hostName (required)
hostMac, list[str]: hostMac (required)
hostType, list[str]: hostType (required)
connectedInterfaceName, list[str]: connectedInterfaceName (required)
hostIp, list[str]: hostIp (required)
connectedDeviceIp, list[str]: connectedDeviceIp (required)
scope, str: Authorization Scope for RBAC (required)
Returns: HostListResult
"""
allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName', 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp', 'connectedDeviceIp', 'scope']
params = locals()
for (key, val) in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method getHosts" % key)
params[key] = val
del params['kwargs']
resourcePath = '/host'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if ('limit' in params):
queryParams['limit'] = self.apiClient.toPathValue(params['limit'])
if ('offset' in params):
queryParams['offset'] = self.apiClient.toPathValue(params['offset'])
if ('sortBy' in params):
queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy'])
if ('order' in params):
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if ('hostName' in params):
queryParams['hostName'] = self.apiClient.toPathValue(params['hostName'])
if ('hostMac' in params):
queryParams['hostMac'] = self.apiClient.toPathValue(params['hostMac'])
if ('hostType' in params):
queryParams['hostType'] = self.apiClient.toPathValue(params['hostType'])
if ('connectedInterfaceName' in params):
queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(params['connectedInterfaceName'])
if ('hostIp' in params):
queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp'])
if ('connectedDeviceIp' in params):
queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(params['connectedDeviceIp'])
if ('scope' in params):
headerParams['scope'] = params['scope']
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'HostListResult')
return responseObject
def getHostCount(self, **kwargs):
"""Gives total number of hosts
Args:
scope, str: Authorization Scope for RBAC (required)
Returns: CountResult
"""
allParams = ['scope']
params = locals()
for (key, val) in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method getHostCount" % key)
params[key] = val
del params['kwargs']
resourcePath = '/host/count'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if ('scope' in params):
headerParams['scope'] = params['scope']
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'CountResult')
return responseObject
def getHostById(self, **kwargs):
"""Retrieves host based on id
Args:
id, str: Host Id (required)
scope, str: Authorization Scope for RBAC (required)
Returns: HostResult
"""
allParams = ['id', 'scope']
params = locals()
for (key, val) in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method getHostById" % key)
params[key] = val
del params['kwargs']
resourcePath = '/host/{id}'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if ('scope' in params):
headerParams['scope'] = params['scope']
if ('id' in params):
replacement = str(self.apiClient.toPathValue(params['id']))
replacement = urllib.parse.quote(replacement)
resourcePath = resourcePath.replace('{' + 'id' + '}',
replacement)
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'HostResult')
return responseObject
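
A minimal usage sketch for the generated HostApi wrapper above. The record does not show how the apiClient object is built, so the stub below is purely hypothetical; it only needs to expose the three methods the wrapper actually calls (toPathValue, callAPI, deserialize). Note that every wrapper method accepts keyword arguments only and raises TypeError on unexpected keywords.

class _StubApiClient:
    # Hypothetical stand-in for the SDK's real client, only to make the sketch self-contained.
    def toPathValue(self, value):
        # assumption: lists are joined with commas, scalars are stringified
        return ','.join(value) if isinstance(value, list) else str(value)
    def callAPI(self, resourcePath, method, queryParams, postData, headerParams, files=None):
        print(method, resourcePath, queryParams)  # a real client would perform the HTTP request here
        return None  # returning None makes the wrapper methods below return None
    def deserialize(self, response, objClass):
        return response

host_api = HostApi(_StubApiClient())
hosts = host_api.getHosts(limit='5', offset='1', sortBy='hostIp', order='asc',
                          hostName=[], hostMac=[], hostType=[],
                          connectedInterfaceName=[], hostIp=[],
                          connectedDeviceIp=[], scope='ALL')
count = host_api.getHostCount(scope='ALL')
one = host_api.getHostById(id='example-host-id', scope='ALL')
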
|
flexible
|
{
"blob_id": "4243c863827f1378c364171ca7d8fdabd42be22f",
"index": 3625,
"step-1": "<mask token>\n\n\nclass HostApi(object):\n <mask token>\n <mask token>\n <mask token>\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-2": "<mask token>\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',\n 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',\n 'connectedDeviceIp', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHosts\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'limit' in params:\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n if 'offset' in params:\n queryParams['offset'] = self.apiClient.toPathValue(params['offset']\n )\n if 'sortBy' in params:\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']\n )\n if 'order' in params:\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n if 'hostName' in params:\n queryParams['hostName'] = self.apiClient.toPathValue(params[\n 'hostName'])\n if 'hostMac' in params:\n queryParams['hostMac'] = self.apiClient.toPathValue(params[\n 'hostMac'])\n if 'hostType' in params:\n queryParams['hostType'] = self.apiClient.toPathValue(params[\n 'hostType'])\n if 'connectedInterfaceName' in params:\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(\n params['connectedInterfaceName'])\n if 'hostIp' in params:\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']\n )\n if 'connectedDeviceIp' in params:\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(\n params['connectedDeviceIp'])\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n <mask token>\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n 
headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-3": "<mask token>\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',\n 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',\n 'connectedDeviceIp', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHosts\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'limit' in params:\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n if 'offset' in params:\n queryParams['offset'] = self.apiClient.toPathValue(params['offset']\n )\n if 'sortBy' in params:\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']\n )\n if 'order' in params:\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n if 'hostName' in params:\n queryParams['hostName'] = self.apiClient.toPathValue(params[\n 'hostName'])\n if 'hostMac' in params:\n queryParams['hostMac'] = self.apiClient.toPathValue(params[\n 'hostMac'])\n if 'hostType' in params:\n queryParams['hostType'] = self.apiClient.toPathValue(params[\n 'hostType'])\n if 'connectedInterfaceName' in params:\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(\n params['connectedInterfaceName'])\n if 'hostIp' in params:\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']\n )\n if 'connectedDeviceIp' in params:\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(\n params['connectedDeviceIp'])\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n\n def getHostCount(self, **kwargs):\n \"\"\"Gives total number of hosts\n\n Args:\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: CountResult\n \"\"\"\n allParams = ['scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostCount\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/count'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n 
headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'CountResult')\n return responseObject\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-4": "<mask token>\nimport sys\nimport os\nimport urllib.request, urllib.parse, urllib.error\nfrom .models import *\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',\n 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',\n 'connectedDeviceIp', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHosts\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'limit' in params:\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n if 'offset' in params:\n queryParams['offset'] = self.apiClient.toPathValue(params['offset']\n )\n if 'sortBy' in params:\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']\n )\n if 'order' in params:\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n if 'hostName' in params:\n queryParams['hostName'] = self.apiClient.toPathValue(params[\n 'hostName'])\n if 'hostMac' in params:\n queryParams['hostMac'] = self.apiClient.toPathValue(params[\n 'hostMac'])\n if 'hostType' in params:\n queryParams['hostType'] = self.apiClient.toPathValue(params[\n 'hostType'])\n if 'connectedInterfaceName' in params:\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(\n params['connectedInterfaceName'])\n if 'hostIp' in params:\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']\n )\n if 'connectedDeviceIp' in params:\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(\n params['connectedDeviceIp'])\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n\n def getHostCount(self, **kwargs):\n \"\"\"Gives total number of hosts\n\n Args:\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: CountResult\n \"\"\"\n allParams = ['scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostCount\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/count'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = 
{}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'CountResult')\n return responseObject\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-5": "#!/usr/bin/env python\n#pylint: skip-file\n\"\"\"\nHostApi.py\n Copyright 2016 Cisco Systems\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n\n\"\"\"\nimport sys\nimport os\nimport urllib.request, urllib.parse, urllib.error\n\nfrom .models import *\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName', 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp', 'connectedDeviceIp', 'scope']\n\n params = locals()\n for (key, val) in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\"Got an unexpected keyword argument '%s' to method getHosts\" % key)\n params[key] = val\n del params['kwargs']\n\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n\n\n if ('limit' in params):\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n\n if ('offset' in params):\n queryParams['offset'] = self.apiClient.toPathValue(params['offset'])\n\n if ('sortBy' in params):\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy'])\n\n if ('order' in params):\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n\n if ('hostName' in params):\n queryParams['hostName'] = self.apiClient.toPathValue(params['hostName'])\n\n if ('hostMac' in params):\n queryParams['hostMac'] = self.apiClient.toPathValue(params['hostMac'])\n\n if ('hostType' in params):\n queryParams['hostType'] = self.apiClient.toPathValue(params['hostType'])\n\n if ('connectedInterfaceName' in params):\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(params['connectedInterfaceName'])\n\n if ('hostIp' in params):\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp'])\n\n if ('connectedDeviceIp' in params):\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(params['connectedDeviceIp'])\n\n\n\n if ('scope' in params):\n headerParams['scope'] = params['scope']\n\n\n\n\n\n\n\n\n postData = (formParams if formParams else bodyParam)\n\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n\n\n if not response:\n return 
None\n\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n\n\n\n\n def getHostCount(self, **kwargs):\n \"\"\"Gives total number of hosts\n\n Args:\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: CountResult\n \"\"\"\n\n allParams = ['scope']\n\n params = locals()\n for (key, val) in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\"Got an unexpected keyword argument '%s' to method getHostCount\" % key)\n params[key] = val\n del params['kwargs']\n\n resourcePath = '/host/count'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n\n\n\n\n if ('scope' in params):\n headerParams['scope'] = params['scope']\n\n\n\n\n\n\n\n\n postData = (formParams if formParams else bodyParam)\n\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n\n\n if not response:\n return None\n\n responseObject = self.apiClient.deserialize(response, 'CountResult')\n return responseObject\n\n\n\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n\n allParams = ['id', 'scope']\n\n params = locals()\n for (key, val) in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\"Got an unexpected keyword argument '%s' to method getHostById\" % key)\n params[key] = val\n del params['kwargs']\n\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n\n\n\n\n if ('scope' in params):\n headerParams['scope'] = params['scope']\n\n\n\n if ('id' in params):\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}',\n replacement)\n\n\n\n\n\n\n postData = (formParams if formParams else bodyParam)\n\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n\n\n if not response:\n return None\n\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n\n\n\n\n\n\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
def load_data_from_csv(file_name, header=0, encoding='utf-8'):
data_df = pd.read_csv(file_name, header=header, encoding=encoding)
return data_df
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def load_data_from_csv(file_name, header=0, encoding='utf-8'):
data_df = pd.read_csv(file_name, header=header, encoding=encoding)
return data_df
<|reserved_special_token_0|>
out.write(data['content'][0].encode('utf-8'))
inp.close()
out.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
outf = 'test.txt'
inf = 'remove_items.txt'
out = open(outf, 'w')
inp = open(inf, 'r')
validate_data_path = (
'/data1/hjw/fine_grit_emotion_analysis/validation/ai_challenger_sentiment_analysis_validationset_20180816/sentiment_analysis_validationset.csv'
)
train_data_path = (
'/data1/hjw/fine_grit_emotion_analysis/train/ai_challenger_sentiment_analysis_trainingset_20180816/sentiment_analysis_trainingset.csv'
)
def load_data_from_csv(file_name, header=0, encoding='utf-8'):
data_df = pd.read_csv(file_name, header=header, encoding=encoding)
return data_df
data = load_data_from_csv(validate_data_path)
out.write(data['content'][0].encode('utf-8'))
inp.close()
out.close()
<|reserved_special_token_1|>
import re
import sys
import os
import pandas as pd
import jieba
import logging
import argparse
from sklearn.externals import joblib
from sklearn.svm import SVC
from sklearn.naive_bayes import MultinomialNB
from sklearn.metrics import f1_score, accuracy_score
from sklearn.feature_extraction.text import TfidfVectorizer
import numpy as np
from sklearn.externals import joblib
import os
import argparse
import keras as ks
from sklearn.model_selection import train_test_split
import pdb
import logging
from pyfasttext import FastText
outf = 'test.txt'
inf = 'remove_items.txt'
out = open(outf, 'w')
inp = open(inf, 'r')
validate_data_path = (
'/data1/hjw/fine_grit_emotion_analysis/validation/ai_challenger_sentiment_analysis_validationset_20180816/sentiment_analysis_validationset.csv'
)
train_data_path = (
'/data1/hjw/fine_grit_emotion_analysis/train/ai_challenger_sentiment_analysis_trainingset_20180816/sentiment_analysis_trainingset.csv'
)
def load_data_from_csv(file_name, header=0, encoding='utf-8'):
data_df = pd.read_csv(file_name, header=header, encoding=encoding)
return data_df
data = load_data_from_csv(validate_data_path)
out.write(data['content'][0].encode('utf-8'))
inp.close()
out.close()
<|reserved_special_token_1|>
#-*- coding: utf-8 -*-
import re
import sys
import os
import pandas as pd
import jieba
import logging
import argparse
from sklearn.externals import joblib
from sklearn.svm import SVC
from sklearn.naive_bayes import MultinomialNB
from sklearn.metrics import f1_score,accuracy_score
from sklearn.feature_extraction.text import TfidfVectorizer
import numpy as np
from sklearn.externals import joblib
import os
import argparse
import keras as ks
from sklearn.model_selection import train_test_split
#from keras.utils.np_utils import to_categorical
#from keras.models import Sequential
#from keras import layers
import pdb
import logging
from pyfasttext import FastText
outf = "test.txt"
inf = "remove_items.txt"
out = open(outf,'w')
inp = open(inf,'r')
#i = inp.readline()
#print(type(i))
#out.write(inp.readline())
validate_data_path = "/data1/hjw/fine_grit_emotion_analysis/validation/ai_challenger_sentiment_analysis_validationset_20180816/sentiment_analysis_validationset.csv"
train_data_path = "/data1/hjw/fine_grit_emotion_analysis/train/ai_challenger_sentiment_analysis_trainingset_20180816/sentiment_analysis_trainingset.csv"
#load the data
def load_data_from_csv(file_name, header=0, encoding="utf-8"):
data_df = pd.read_csv(file_name, header=header, encoding=encoding)
return data_df
#train = load_data_from(train_data_path)
data = load_data_from_csv(validate_data_path)
out.write(data['content'][0].encode('utf-8'))
inp.close()
out.close()
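
One caveat about the loading script above: it mixes Python 2 and Python 3 idioms. The urllib.request/urllib.parse imports imply Python 3, but out was opened in text mode ('w'), so out.write(data['content'][0].encode('utf-8')) raises TypeError there (bytes written to a text-mode file). A minimal Python 3 sketch of the write step, keeping the file name and column from the record (the encoding choice is an assumption):

with open('test.txt', 'w', encoding='utf-8') as out:
    out.write(data['content'][0])  # already a str under Python 3, no encode() needed
# or, if raw bytes are really wanted:
# with open('test.txt', 'wb') as out_bytes:
#     out_bytes.write(data['content'][0].encode('utf-8'))
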
|
flexible
|
{
"blob_id": "c879230efe12bde9042159da221a2b9b4c1d8349",
"index": 198,
"step-1": "<mask token>\n\n\ndef load_data_from_csv(file_name, header=0, encoding='utf-8'):\n data_df = pd.read_csv(file_name, header=header, encoding=encoding)\n return data_df\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef load_data_from_csv(file_name, header=0, encoding='utf-8'):\n data_df = pd.read_csv(file_name, header=header, encoding=encoding)\n return data_df\n\n\n<mask token>\nout.write(data['content'][0].encode('utf-8'))\ninp.close()\nout.close()\n",
"step-3": "<mask token>\noutf = 'test.txt'\ninf = 'remove_items.txt'\nout = open(outf, 'w')\ninp = open(inf, 'r')\nvalidate_data_path = (\n '/data1/hjw/fine_grit_emotion_analysis/validation/ai_challenger_sentiment_analysis_validationset_20180816/sentiment_analysis_validationset.csv'\n )\ntrain_data_path = (\n '/data1/hjw/fine_grit_emotion_analysis/train/ai_challenger_sentiment_analysis_trainingset_20180816/sentiment_analysis_trainingset.csv'\n )\n\n\ndef load_data_from_csv(file_name, header=0, encoding='utf-8'):\n data_df = pd.read_csv(file_name, header=header, encoding=encoding)\n return data_df\n\n\ndata = load_data_from_csv(validate_data_path)\nout.write(data['content'][0].encode('utf-8'))\ninp.close()\nout.close()\n",
"step-4": "import re\nimport sys\nimport os\nimport pandas as pd\nimport jieba\nimport logging\nimport argparse\nfrom sklearn.externals import joblib\nfrom sklearn.svm import SVC\nfrom sklearn.naive_bayes import MultinomialNB\nfrom sklearn.metrics import f1_score, accuracy_score\nfrom sklearn.feature_extraction.text import TfidfVectorizer\nimport numpy as np\nfrom sklearn.externals import joblib\nimport os\nimport argparse\nimport keras as ks\nfrom sklearn.model_selection import train_test_split\nimport pdb\nimport logging\nfrom pyfasttext import FastText\noutf = 'test.txt'\ninf = 'remove_items.txt'\nout = open(outf, 'w')\ninp = open(inf, 'r')\nvalidate_data_path = (\n '/data1/hjw/fine_grit_emotion_analysis/validation/ai_challenger_sentiment_analysis_validationset_20180816/sentiment_analysis_validationset.csv'\n )\ntrain_data_path = (\n '/data1/hjw/fine_grit_emotion_analysis/train/ai_challenger_sentiment_analysis_trainingset_20180816/sentiment_analysis_trainingset.csv'\n )\n\n\ndef load_data_from_csv(file_name, header=0, encoding='utf-8'):\n data_df = pd.read_csv(file_name, header=header, encoding=encoding)\n return data_df\n\n\ndata = load_data_from_csv(validate_data_path)\nout.write(data['content'][0].encode('utf-8'))\ninp.close()\nout.close()\n",
"step-5": "#-*- coding: utf-8 -*-\nimport re\nimport sys\nimport os\nimport pandas as pd\nimport jieba\nimport logging\nimport argparse\nfrom sklearn.externals import joblib\nfrom sklearn.svm import SVC\nfrom sklearn.naive_bayes import MultinomialNB\nfrom sklearn.metrics import f1_score,accuracy_score\nfrom sklearn.feature_extraction.text import TfidfVectorizer\nimport numpy as np\nfrom sklearn.externals import joblib\nimport os\nimport argparse\nimport keras as ks\nfrom sklearn.model_selection import train_test_split\n#from keras.utils.np_utils import to_categorical\n#from keras.models import Sequential\n#from keras import layers\nimport pdb\nimport logging\nfrom pyfasttext import FastText\n\noutf = \"test.txt\"\ninf = \"remove_items.txt\"\n\nout = open(outf,'w')\ninp = open(inf,'r')\n\n#i = inp.readline()\n#print(type(i))\n#out.write(inp.readline())\n\n\nvalidate_data_path = \"/data1/hjw/fine_grit_emotion_analysis/validation/ai_challenger_sentiment_analysis_validationset_20180816/sentiment_analysis_validationset.csv\"\ntrain_data_path = \"/data1/hjw/fine_grit_emotion_analysis/train/ai_challenger_sentiment_analysis_trainingset_20180816/sentiment_analysis_trainingset.csv\"\n#load the data\ndef load_data_from_csv(file_name, header=0, encoding=\"utf-8\"):\n\n data_df = pd.read_csv(file_name, header=header, encoding=encoding)\n\n return data_df\n\n#train = load_data_from(train_data_path)\ndata = load_data_from_csv(validate_data_path)\n\nout.write(data['content'][0].encode('utf-8'))\n\ninp.close()\nout.close()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def writeUniquerecords(dirpath, filenames):
sourcepath = os.path.join(dirpath, filenames)
with open(sourcepath, 'r') as fp:
lines = fp.readlines()
destination_lines = []
for line in lines:
if line not in destination_lines:
destination_lines.append(line)
destinationfile = (
'/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/UpdatedFolder'
)
destipath = os.path.join(destinationfile, filenames)
with open(destipath, 'w+') as destination:
destination.write('\n'.join(destination_lines))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def writeUniquerecords(dirpath, filenames):
sourcepath = os.path.join(dirpath, filenames)
with open(sourcepath, 'r') as fp:
lines = fp.readlines()
destination_lines = []
for line in lines:
if line not in destination_lines:
destination_lines.append(line)
destinationfile = (
'/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/UpdatedFolder'
)
destipath = os.path.join(destinationfile, filenames)
with open(destipath, 'w+') as destination:
destination.write('\n'.join(destination_lines))
def Readandwrite():
for dirpath, dirnames, filenames in os.walk(
'/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/locators'
):
print('Current Path', dirpath)
print('Current Folder names', dirnames)
print('Current Files names ', filenames)
for file in filenames:
writeUniquerecords(dirpath, file)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def writeUniquerecords(dirpath, filenames):
sourcepath = os.path.join(dirpath, filenames)
with open(sourcepath, 'r') as fp:
lines = fp.readlines()
destination_lines = []
for line in lines:
if line not in destination_lines:
destination_lines.append(line)
destinationfile = (
'/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/UpdatedFolder'
)
destipath = os.path.join(destinationfile, filenames)
with open(destipath, 'w+') as destination:
destination.write('\n'.join(destination_lines))
def Readandwrite():
for dirpath, dirnames, filenames in os.walk(
'/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/locators'
):
print('Current Path', dirpath)
print('Current Folder names', dirnames)
print('Current Files names ', filenames)
for file in filenames:
writeUniquerecords(dirpath, file)
Readandwrite()
<|reserved_special_token_1|>
import os
from test.test_unicode_file_functions import filenames
def writeUniquerecords(dirpath, filenames):
sourcepath = os.path.join(dirpath, filenames)
with open(sourcepath, 'r') as fp:
lines = fp.readlines()
destination_lines = []
for line in lines:
if line not in destination_lines:
destination_lines.append(line)
destinationfile = (
'/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/UpdatedFolder'
)
destipath = os.path.join(destinationfile, filenames)
with open(destipath, 'w+') as destination:
destination.write('\n'.join(destination_lines))
def Readandwrite():
for dirpath, dirnames, filenames in os.walk(
'/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/locators'
):
print('Current Path', dirpath)
print('Current Folder names', dirnames)
print('Current Files names ', filenames)
for file in filenames:
writeUniquerecords(dirpath, file)
Readandwrite()
<|reserved_special_token_1|>
import os
from test.test_unicode_file_functions import filenames
def writeUniquerecords(dirpath,filenames):
sourcepath=os.path.join(dirpath,filenames)
with open(sourcepath,'r') as fp:
lines= fp.readlines()
destination_lines=[]
for line in lines:
if line not in destination_lines:
destination_lines.append(line)
destinationfile='/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/UpdatedFolder'
destipath=os.path.join(destinationfile,filenames)
with open(destipath, "w+")as destination:
destination.write("\n".join(destination_lines))
def Readandwrite():
for dirpath,dirnames,filenames in os.walk('/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/locators'):
print('Current Path',dirpath)
print('Current Folder names',dirnames)
print('Current Files names ',filenames)
for file in filenames:
writeUniquerecords(dirpath,file)
Readandwrite()
|
flexible
|
{
"blob_id": "4ed730369cf065936569a8515de44042829c2143",
"index": 1201,
"step-1": "<mask token>\n\n\ndef writeUniquerecords(dirpath, filenames):\n sourcepath = os.path.join(dirpath, filenames)\n with open(sourcepath, 'r') as fp:\n lines = fp.readlines()\n destination_lines = []\n for line in lines:\n if line not in destination_lines:\n destination_lines.append(line)\n destinationfile = (\n '/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/UpdatedFolder'\n )\n destipath = os.path.join(destinationfile, filenames)\n with open(destipath, 'w+') as destination:\n destination.write('\\n'.join(destination_lines))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef writeUniquerecords(dirpath, filenames):\n sourcepath = os.path.join(dirpath, filenames)\n with open(sourcepath, 'r') as fp:\n lines = fp.readlines()\n destination_lines = []\n for line in lines:\n if line not in destination_lines:\n destination_lines.append(line)\n destinationfile = (\n '/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/UpdatedFolder'\n )\n destipath = os.path.join(destinationfile, filenames)\n with open(destipath, 'w+') as destination:\n destination.write('\\n'.join(destination_lines))\n\n\ndef Readandwrite():\n for dirpath, dirnames, filenames in os.walk(\n '/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/locators'\n ):\n print('Current Path', dirpath)\n print('Current Folder names', dirnames)\n print('Current Files names ', filenames)\n for file in filenames:\n writeUniquerecords(dirpath, file)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef writeUniquerecords(dirpath, filenames):\n sourcepath = os.path.join(dirpath, filenames)\n with open(sourcepath, 'r') as fp:\n lines = fp.readlines()\n destination_lines = []\n for line in lines:\n if line not in destination_lines:\n destination_lines.append(line)\n destinationfile = (\n '/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/UpdatedFolder'\n )\n destipath = os.path.join(destinationfile, filenames)\n with open(destipath, 'w+') as destination:\n destination.write('\\n'.join(destination_lines))\n\n\ndef Readandwrite():\n for dirpath, dirnames, filenames in os.walk(\n '/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/locators'\n ):\n print('Current Path', dirpath)\n print('Current Folder names', dirnames)\n print('Current Files names ', filenames)\n for file in filenames:\n writeUniquerecords(dirpath, file)\n\n\nReadandwrite()\n",
"step-4": "import os\nfrom test.test_unicode_file_functions import filenames\n\n\ndef writeUniquerecords(dirpath, filenames):\n sourcepath = os.path.join(dirpath, filenames)\n with open(sourcepath, 'r') as fp:\n lines = fp.readlines()\n destination_lines = []\n for line in lines:\n if line not in destination_lines:\n destination_lines.append(line)\n destinationfile = (\n '/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/UpdatedFolder'\n )\n destipath = os.path.join(destinationfile, filenames)\n with open(destipath, 'w+') as destination:\n destination.write('\\n'.join(destination_lines))\n\n\ndef Readandwrite():\n for dirpath, dirnames, filenames in os.walk(\n '/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/locators'\n ):\n print('Current Path', dirpath)\n print('Current Folder names', dirnames)\n print('Current Files names ', filenames)\n for file in filenames:\n writeUniquerecords(dirpath, file)\n\n\nReadandwrite()\n",
"step-5": "import os\nfrom test.test_unicode_file_functions import filenames\n\n\ndef writeUniquerecords(dirpath,filenames):\n sourcepath=os.path.join(dirpath,filenames)\n with open(sourcepath,'r') as fp:\n lines= fp.readlines()\n destination_lines=[]\n for line in lines:\n if line not in destination_lines:\n destination_lines.append(line)\n \n destinationfile='/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/UpdatedFolder'\n destipath=os.path.join(destinationfile,filenames)\n with open(destipath, \"w+\")as destination:\n destination.write(\"\\n\".join(destination_lines)) \n\n\ndef Readandwrite():\n for dirpath,dirnames,filenames in os.walk('/Users/vijayakarthikeyanarul/git/python_Skills/com/filehandling/locators'):\n print('Current Path',dirpath)\n print('Current Folder names',dirnames)\n print('Current Files names ',filenames)\n for file in filenames:\n writeUniquerecords(dirpath,file)\n \n \n\nReadandwrite()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
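The dedup record above tests line not in destination_lines against a growing list (quadratic for large files) and then re-joins with '\n' even though readlines() already keeps each newline, which effectively doubles the line breaks in the output. A hedged sketch of the same idea using a set for membership and writelines for output — the function name and paths here are illustrative, not part of the record:

import os

def write_unique_records(src_path, dest_dir):
    # Keep only the first occurrence of each line, preserving order.
    with open(src_path, 'r') as fp:
        lines = fp.readlines()
    seen = set()
    unique_lines = []
    for line in lines:
        if line not in seen:
            seen.add(line)
            unique_lines.append(line)
    dest_path = os.path.join(dest_dir, os.path.basename(src_path))
    with open(dest_path, 'w') as out:
        # readlines() keeps the trailing '\n' on every line, so writelines()
        # reproduces the records without adding extra blank lines.
        out.writelines(unique_lines)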
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
driver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')
<|reserved_special_token_0|>
for name in names:
box = driver.find_element_by_xpath('//*[@id="sbtc"]/div/div[2]/input')
box.send_keys(name + str(' cover ps4'))
box.send_keys(Keys.ENTER)
for i in range(0, 1):
try:
driver.find_element_by_xpath(
'//*[@id="islrg"]/div[1]/div[1]/a[1]/div[1]/img').screenshot(
'C:/Users/AAYUSH/OneDrive/Desktop/labels/images/image(' +
str(k) + ').png')
k = k + 1
except:
pass
driver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
df = pd.read_csv('games_data.csv')
names = df['game']
driver = webdriver.Chrome('D:/chromedriver.exe')
driver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')
k = 0
for name in names:
box = driver.find_element_by_xpath('//*[@id="sbtc"]/div/div[2]/input')
box.send_keys(name + str(' cover ps4'))
box.send_keys(Keys.ENTER)
for i in range(0, 1):
try:
driver.find_element_by_xpath(
'//*[@id="islrg"]/div[1]/div[1]/a[1]/div[1]/img').screenshot(
'C:/Users/AAYUSH/OneDrive/Desktop/labels/images/image(' +
str(k) + ').png')
k = k + 1
except:
pass
driver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')
<|reserved_special_token_1|>
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
import pandas as pd
df = pd.read_csv('games_data.csv')
names = df['game']
driver = webdriver.Chrome('D:/chromedriver.exe')
driver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')
k = 0
for name in names:
box = driver.find_element_by_xpath('//*[@id="sbtc"]/div/div[2]/input')
box.send_keys(name + str(' cover ps4'))
box.send_keys(Keys.ENTER)
for i in range(0, 1):
try:
driver.find_element_by_xpath(
'//*[@id="islrg"]/div[1]/div[1]/a[1]/div[1]/img').screenshot(
'C:/Users/AAYUSH/OneDrive/Desktop/labels/images/image(' +
str(k) + ').png')
k = k + 1
except:
pass
driver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')
|
flexible
|
{
"blob_id": "6375ac80b081b7eafbc5c3fc7e84c4eff2604848",
"index": 4041,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndriver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')\n<mask token>\nfor name in names:\n box = driver.find_element_by_xpath('//*[@id=\"sbtc\"]/div/div[2]/input')\n box.send_keys(name + str(' cover ps4'))\n box.send_keys(Keys.ENTER)\n for i in range(0, 1):\n try:\n driver.find_element_by_xpath(\n '//*[@id=\"islrg\"]/div[1]/div[1]/a[1]/div[1]/img').screenshot(\n 'C:/Users/AAYUSH/OneDrive/Desktop/labels/images/image(' +\n str(k) + ').png')\n k = k + 1\n except:\n pass\n driver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')\n",
"step-3": "<mask token>\ndf = pd.read_csv('games_data.csv')\nnames = df['game']\ndriver = webdriver.Chrome('D:/chromedriver.exe')\ndriver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')\nk = 0\nfor name in names:\n box = driver.find_element_by_xpath('//*[@id=\"sbtc\"]/div/div[2]/input')\n box.send_keys(name + str(' cover ps4'))\n box.send_keys(Keys.ENTER)\n for i in range(0, 1):\n try:\n driver.find_element_by_xpath(\n '//*[@id=\"islrg\"]/div[1]/div[1]/a[1]/div[1]/img').screenshot(\n 'C:/Users/AAYUSH/OneDrive/Desktop/labels/images/image(' +\n str(k) + ').png')\n k = k + 1\n except:\n pass\n driver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')\n",
"step-4": "from selenium import webdriver\nfrom selenium.webdriver.common.keys import Keys\nimport time\nimport pandas as pd\ndf = pd.read_csv('games_data.csv')\nnames = df['game']\ndriver = webdriver.Chrome('D:/chromedriver.exe')\ndriver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')\nk = 0\nfor name in names:\n box = driver.find_element_by_xpath('//*[@id=\"sbtc\"]/div/div[2]/input')\n box.send_keys(name + str(' cover ps4'))\n box.send_keys(Keys.ENTER)\n for i in range(0, 1):\n try:\n driver.find_element_by_xpath(\n '//*[@id=\"islrg\"]/div[1]/div[1]/a[1]/div[1]/img').screenshot(\n 'C:/Users/AAYUSH/OneDrive/Desktop/labels/images/image(' +\n str(k) + ').png')\n k = k + 1\n except:\n pass\n driver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
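The scraping record above uses driver.find_element_by_xpath, a helper that was removed in Selenium 4. A sketch of the same lookup with the current locator API — the search term and output filename are placeholders, and the XPath expressions are copied from the record as-is:

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys

driver = webdriver.Chrome()   # Selenium 4 can resolve the chromedriver binary itself
driver.get('https://www.google.ca/imghp?hl=en&tab=ri&authuser=0&ogbl')
box = driver.find_element(By.XPATH, '//*[@id="sbtc"]/div/div[2]/input')
box.send_keys('some game cover ps4')
box.send_keys(Keys.ENTER)
driver.find_element(
    By.XPATH, '//*[@id="islrg"]/div[1]/div[1]/a[1]/div[1]/img'
).screenshot('image0.png')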
import os
import sys
from shutil import copyfile
def buildDocumentation():
"""
Build eMonitor Documentation with sphinx
:param sys.argv:
* html: build html documentation in directory */docs/output/html*
* pdf: build pdf documentation in directory */docs/output/pdf*
"""
helptext = 'usage: build_doc.py <output format> <type of documentation>' \
'\n - html: for html output' \
'\n - pdf: for pdf output' \
'\n\n - all: complete documentation' \
'\n - dev: only developer documentation' \
'\n - user: only user documentation'
    if len(sys.argv) != 3:
        print(helptext)
        sys.exit(1)
    if sys.argv[1] not in ['pdf', 'html']:
        print(helptext)
        sys.exit(1)
    if sys.argv[2] not in ['all', 'dev', 'user']:
        print(helptext)
        sys.exit(1)
copyfile('docs/index_%s.rst.template' % sys.argv[2], 'index.rst') # copy main file into root directory
os.system('sphinx-build -b %s -c docs -D master_doc=index . docs/output/%s/%s' % (sys.argv[1], sys.argv[1], sys.argv[2]))
os.remove('index.rst') # delete config file from root directory
if __name__ == '__main__':
buildDocumentation()
|
normal
|
{
"blob_id": "e60c3a6aececd97ec08ae32b552bcda795375b3b",
"index": 779,
"step-1": "import os\nimport sys\nfrom shutil import copyfile\n\n\ndef buildDocumentation():\n \"\"\"\n Build eMonitor Documentation with sphinx\n\n :param sys.argv:\n\n * html: build html documentation in directory */docs/output/html*\n * pdf: build pdf documentation in directory */docs/output/pdf*\n\n \"\"\"\n helptext = 'usage: build_doc.py <output format> <type of documentation>' \\\n '\\n - html: for html output' \\\n '\\n - pdf: for pdf output' \\\n '\\n\\n - all: complete documentation' \\\n '\\n - dev: only developer documentation' \\\n '\\n - user: only user documentation'\n if len(sys.argv) != 3:\n print helptext\n sys.exit(1)\n\n if sys.argv[1] not in ['pdf', 'html']:\n print helptext\n sys.exit(1)\n if sys.argv[2] not in ['all', 'dev', 'user']:\n print helptext\n sys.exit(1)\n\n copyfile('docs/index_%s.rst.template' % sys.argv[2], 'index.rst') # copy main file into root directory\n os.system('sphinx-build -b %s -c docs -D master_doc=index . docs/output/%s/%s' % (sys.argv[1], sys.argv[1], sys.argv[2]))\n os.remove('index.rst') # delete config file from root directory\n\nif __name__ == '__main__':\n buildDocumentation()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
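A usage sketch for the documentation builder above, assuming the script is saved as build_doc.py, Sphinx is installed, and the command is run from the project root:

import subprocess

# Valid argument combinations per the checks above: {pdf, html} x {all, dev, user}.
subprocess.run(['python', 'build_doc.py', 'html', 'all'], check=True)   # -> docs/output/html/all
subprocess.run(['python', 'build_doc.py', 'pdf', 'user'], check=True)   # -> docs/output/pdf/user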
import numpy as np
import tkinter as tk
import time
HEIGHT = 100
WIDTH = 800
ROBOT_START_X = 700
ROBOT_START_Y = 50
SLEEP_TIME = 0.00001
SLEEP_TIME_RESET = 0.2
class Environment(tk.Tk, object):
def __init__(self):
super(Environment, self).__init__()
self.action_space = ['g', 'b'] # go, break
self.num_actions = len(self.action_space)
self.title('Environment')
self.geometry('{0}x{1}'.format(WIDTH, HEIGHT))
self._build_environment()
def _build_environment(self):
self.canvas = tk.Canvas(self, bg='white', height=HEIGHT, width=WIDTH)
# create obstacle
obstacle_center = np.array([20, 50])
self.obstacle = self.canvas.create_rectangle(
obstacle_center[0] - 10, obstacle_center[1] - 40,
obstacle_center[0] + 10, obstacle_center[1] + 40,
fill='black'
)
# create robot
robot_center = np.array([ROBOT_START_X, ROBOT_START_Y])
self.robot = self.canvas.create_polygon([
robot_center[0] - 25, robot_center[1] + 10, robot_center[0] - 25, robot_center[1] - 10,
robot_center[0] - 15, robot_center[1] - 10, robot_center[0] - 15, robot_center[1] - 25,
robot_center[0] + 25, robot_center[1] - 25, robot_center[0] + 25, robot_center[1] + 25,
robot_center[0] - 15, robot_center[1] + 25, robot_center[0] - 15, robot_center[1] + 10
],
fill='blue'
)
# pack
self.canvas.pack()
def stop_robot(self):
# change outline to show the robot slows down
self.canvas.itemconfig(self.robot, outline='red')
# slow down robot
for i in range(50):
self.canvas.move(self.robot, -1, 0)
time.sleep(SLEEP_TIME * 10 * i)
self.render()
# change outline back again
self.canvas.itemconfig(self.robot, outline='')
self.render()
time.sleep(0.2)
def perform_action(self, action):
stopped = False
done = False
reward = 0
if action == 0: # drive
self.canvas.move(self.robot, -1, 0)
elif action == 1: # break
# if you want to speed up the process comment the next line in and the function stop_robot out
#self.canvas.move(self.robot, -50, 0) # move further because of stop distance
self.stop_robot()
stopped = True
nextState = self.canvas.coords(self.robot)
obstCoords = self.canvas.coords(self.obstacle)
dist = nextState[0] - obstCoords[2]
if stopped:
if (dist >= 15 and dist <= 40): # if enough space to obstacle
reward = 1
done = True
elif dist < 15: # if too close to obstacle
reward = -1
done = True
else: # if too far away to obstacle
reward = -1
done = False
elif nextState[0] <= obstCoords[2]: # if robot hits obstacle
reward = -1
done = True
return dist, reward, done
def reset(self):
self.update()
time.sleep(SLEEP_TIME_RESET)
self.canvas.delete(self.robot)
# create robot
robot_center = np.array([ROBOT_START_X, ROBOT_START_Y])
self.robot = self.canvas.create_polygon([
robot_center[0] - 25, robot_center[1] + 10, robot_center[0] - 25, robot_center[1] - 10,
robot_center[0] - 15, robot_center[1] - 10, robot_center[0] - 15, robot_center[1] - 25,
robot_center[0] + 25, robot_center[1] - 25, robot_center[0] + 25, robot_center[1] + 25,
robot_center[0] - 15, robot_center[1] + 25, robot_center[0] - 15, robot_center[1] + 10
],
fill='blue'
)
robotCoords = self.canvas.coords(self.robot)
obstCoords = self.canvas.coords(self.obstacle)
dist = robotCoords[0] - obstCoords[2]
return dist
def render(self):
time.sleep(SLEEP_TIME)
self.update()
|
normal
|
{
"blob_id": "ee272fe1a023d85d818a8532055dcb5dbcb6a707",
"index": 4799,
"step-1": "<mask token>\n\n\nclass Environment(tk.Tk, object):\n\n def __init__(self):\n super(Environment, self).__init__()\n self.action_space = ['g', 'b']\n self.num_actions = len(self.action_space)\n self.title('Environment')\n self.geometry('{0}x{1}'.format(WIDTH, HEIGHT))\n self._build_environment()\n <mask token>\n <mask token>\n\n def perform_action(self, action):\n stopped = False\n done = False\n reward = 0\n if action == 0:\n self.canvas.move(self.robot, -1, 0)\n elif action == 1:\n self.stop_robot()\n stopped = True\n nextState = self.canvas.coords(self.robot)\n obstCoords = self.canvas.coords(self.obstacle)\n dist = nextState[0] - obstCoords[2]\n if stopped:\n if dist >= 15 and dist <= 40:\n reward = 1\n done = True\n elif dist < 15:\n reward = -1\n done = True\n else:\n reward = -1\n done = False\n elif nextState[0] <= obstCoords[2]:\n reward = -1\n done = True\n return dist, reward, done\n\n def reset(self):\n self.update()\n time.sleep(SLEEP_TIME_RESET)\n self.canvas.delete(self.robot)\n robot_center = np.array([ROBOT_START_X, ROBOT_START_Y])\n self.robot = self.canvas.create_polygon([robot_center[0] - 25, \n robot_center[1] + 10, robot_center[0] - 25, robot_center[1] - \n 10, robot_center[0] - 15, robot_center[1] - 10, robot_center[0] -\n 15, robot_center[1] - 25, robot_center[0] + 25, robot_center[1] -\n 25, robot_center[0] + 25, robot_center[1] + 25, robot_center[0] -\n 15, robot_center[1] + 25, robot_center[0] - 15, robot_center[1] +\n 10], fill='blue')\n robotCoords = self.canvas.coords(self.robot)\n obstCoords = self.canvas.coords(self.obstacle)\n dist = robotCoords[0] - obstCoords[2]\n return dist\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Environment(tk.Tk, object):\n\n def __init__(self):\n super(Environment, self).__init__()\n self.action_space = ['g', 'b']\n self.num_actions = len(self.action_space)\n self.title('Environment')\n self.geometry('{0}x{1}'.format(WIDTH, HEIGHT))\n self._build_environment()\n\n def _build_environment(self):\n self.canvas = tk.Canvas(self, bg='white', height=HEIGHT, width=WIDTH)\n obstacle_center = np.array([20, 50])\n self.obstacle = self.canvas.create_rectangle(obstacle_center[0] - \n 10, obstacle_center[1] - 40, obstacle_center[0] + 10, \n obstacle_center[1] + 40, fill='black')\n robot_center = np.array([ROBOT_START_X, ROBOT_START_Y])\n self.robot = self.canvas.create_polygon([robot_center[0] - 25, \n robot_center[1] + 10, robot_center[0] - 25, robot_center[1] - \n 10, robot_center[0] - 15, robot_center[1] - 10, robot_center[0] -\n 15, robot_center[1] - 25, robot_center[0] + 25, robot_center[1] -\n 25, robot_center[0] + 25, robot_center[1] + 25, robot_center[0] -\n 15, robot_center[1] + 25, robot_center[0] - 15, robot_center[1] +\n 10], fill='blue')\n self.canvas.pack()\n <mask token>\n\n def perform_action(self, action):\n stopped = False\n done = False\n reward = 0\n if action == 0:\n self.canvas.move(self.robot, -1, 0)\n elif action == 1:\n self.stop_robot()\n stopped = True\n nextState = self.canvas.coords(self.robot)\n obstCoords = self.canvas.coords(self.obstacle)\n dist = nextState[0] - obstCoords[2]\n if stopped:\n if dist >= 15 and dist <= 40:\n reward = 1\n done = True\n elif dist < 15:\n reward = -1\n done = True\n else:\n reward = -1\n done = False\n elif nextState[0] <= obstCoords[2]:\n reward = -1\n done = True\n return dist, reward, done\n\n def reset(self):\n self.update()\n time.sleep(SLEEP_TIME_RESET)\n self.canvas.delete(self.robot)\n robot_center = np.array([ROBOT_START_X, ROBOT_START_Y])\n self.robot = self.canvas.create_polygon([robot_center[0] - 25, \n robot_center[1] + 10, robot_center[0] - 25, robot_center[1] - \n 10, robot_center[0] - 15, robot_center[1] - 10, robot_center[0] -\n 15, robot_center[1] - 25, robot_center[0] + 25, robot_center[1] -\n 25, robot_center[0] + 25, robot_center[1] + 25, robot_center[0] -\n 15, robot_center[1] + 25, robot_center[0] - 15, robot_center[1] +\n 10], fill='blue')\n robotCoords = self.canvas.coords(self.robot)\n obstCoords = self.canvas.coords(self.obstacle)\n dist = robotCoords[0] - obstCoords[2]\n return dist\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Environment(tk.Tk, object):\n\n def __init__(self):\n super(Environment, self).__init__()\n self.action_space = ['g', 'b']\n self.num_actions = len(self.action_space)\n self.title('Environment')\n self.geometry('{0}x{1}'.format(WIDTH, HEIGHT))\n self._build_environment()\n\n def _build_environment(self):\n self.canvas = tk.Canvas(self, bg='white', height=HEIGHT, width=WIDTH)\n obstacle_center = np.array([20, 50])\n self.obstacle = self.canvas.create_rectangle(obstacle_center[0] - \n 10, obstacle_center[1] - 40, obstacle_center[0] + 10, \n obstacle_center[1] + 40, fill='black')\n robot_center = np.array([ROBOT_START_X, ROBOT_START_Y])\n self.robot = self.canvas.create_polygon([robot_center[0] - 25, \n robot_center[1] + 10, robot_center[0] - 25, robot_center[1] - \n 10, robot_center[0] - 15, robot_center[1] - 10, robot_center[0] -\n 15, robot_center[1] - 25, robot_center[0] + 25, robot_center[1] -\n 25, robot_center[0] + 25, robot_center[1] + 25, robot_center[0] -\n 15, robot_center[1] + 25, robot_center[0] - 15, robot_center[1] +\n 10], fill='blue')\n self.canvas.pack()\n <mask token>\n\n def perform_action(self, action):\n stopped = False\n done = False\n reward = 0\n if action == 0:\n self.canvas.move(self.robot, -1, 0)\n elif action == 1:\n self.stop_robot()\n stopped = True\n nextState = self.canvas.coords(self.robot)\n obstCoords = self.canvas.coords(self.obstacle)\n dist = nextState[0] - obstCoords[2]\n if stopped:\n if dist >= 15 and dist <= 40:\n reward = 1\n done = True\n elif dist < 15:\n reward = -1\n done = True\n else:\n reward = -1\n done = False\n elif nextState[0] <= obstCoords[2]:\n reward = -1\n done = True\n return dist, reward, done\n\n def reset(self):\n self.update()\n time.sleep(SLEEP_TIME_RESET)\n self.canvas.delete(self.robot)\n robot_center = np.array([ROBOT_START_X, ROBOT_START_Y])\n self.robot = self.canvas.create_polygon([robot_center[0] - 25, \n robot_center[1] + 10, robot_center[0] - 25, robot_center[1] - \n 10, robot_center[0] - 15, robot_center[1] - 10, robot_center[0] -\n 15, robot_center[1] - 25, robot_center[0] + 25, robot_center[1] -\n 25, robot_center[0] + 25, robot_center[1] + 25, robot_center[0] -\n 15, robot_center[1] + 25, robot_center[0] - 15, robot_center[1] +\n 10], fill='blue')\n robotCoords = self.canvas.coords(self.robot)\n obstCoords = self.canvas.coords(self.obstacle)\n dist = robotCoords[0] - obstCoords[2]\n return dist\n\n def render(self):\n time.sleep(SLEEP_TIME)\n self.update()\n",
"step-4": "<mask token>\nHEIGHT = 100\nWIDTH = 800\nROBOT_START_X = 700\nROBOT_START_Y = 50\nSLEEP_TIME = 1e-05\nSLEEP_TIME_RESET = 0.2\n\n\nclass Environment(tk.Tk, object):\n\n def __init__(self):\n super(Environment, self).__init__()\n self.action_space = ['g', 'b']\n self.num_actions = len(self.action_space)\n self.title('Environment')\n self.geometry('{0}x{1}'.format(WIDTH, HEIGHT))\n self._build_environment()\n\n def _build_environment(self):\n self.canvas = tk.Canvas(self, bg='white', height=HEIGHT, width=WIDTH)\n obstacle_center = np.array([20, 50])\n self.obstacle = self.canvas.create_rectangle(obstacle_center[0] - \n 10, obstacle_center[1] - 40, obstacle_center[0] + 10, \n obstacle_center[1] + 40, fill='black')\n robot_center = np.array([ROBOT_START_X, ROBOT_START_Y])\n self.robot = self.canvas.create_polygon([robot_center[0] - 25, \n robot_center[1] + 10, robot_center[0] - 25, robot_center[1] - \n 10, robot_center[0] - 15, robot_center[1] - 10, robot_center[0] -\n 15, robot_center[1] - 25, robot_center[0] + 25, robot_center[1] -\n 25, robot_center[0] + 25, robot_center[1] + 25, robot_center[0] -\n 15, robot_center[1] + 25, robot_center[0] - 15, robot_center[1] +\n 10], fill='blue')\n self.canvas.pack()\n\n def stop_robot(self):\n self.canvas.itemconfig(self.robot, outline='red')\n for i in range(50):\n self.canvas.move(self.robot, -1, 0)\n time.sleep(SLEEP_TIME * 10 * i)\n self.render()\n self.canvas.itemconfig(self.robot, outline='')\n self.render()\n time.sleep(0.2)\n\n def perform_action(self, action):\n stopped = False\n done = False\n reward = 0\n if action == 0:\n self.canvas.move(self.robot, -1, 0)\n elif action == 1:\n self.stop_robot()\n stopped = True\n nextState = self.canvas.coords(self.robot)\n obstCoords = self.canvas.coords(self.obstacle)\n dist = nextState[0] - obstCoords[2]\n if stopped:\n if dist >= 15 and dist <= 40:\n reward = 1\n done = True\n elif dist < 15:\n reward = -1\n done = True\n else:\n reward = -1\n done = False\n elif nextState[0] <= obstCoords[2]:\n reward = -1\n done = True\n return dist, reward, done\n\n def reset(self):\n self.update()\n time.sleep(SLEEP_TIME_RESET)\n self.canvas.delete(self.robot)\n robot_center = np.array([ROBOT_START_X, ROBOT_START_Y])\n self.robot = self.canvas.create_polygon([robot_center[0] - 25, \n robot_center[1] + 10, robot_center[0] - 25, robot_center[1] - \n 10, robot_center[0] - 15, robot_center[1] - 10, robot_center[0] -\n 15, robot_center[1] - 25, robot_center[0] + 25, robot_center[1] -\n 25, robot_center[0] + 25, robot_center[1] + 25, robot_center[0] -\n 15, robot_center[1] + 25, robot_center[0] - 15, robot_center[1] +\n 10], fill='blue')\n robotCoords = self.canvas.coords(self.robot)\n obstCoords = self.canvas.coords(self.obstacle)\n dist = robotCoords[0] - obstCoords[2]\n return dist\n\n def render(self):\n time.sleep(SLEEP_TIME)\n self.update()\n",
"step-5": "import numpy as np\nimport tkinter as tk\nimport time\n\nHEIGHT = 100\nWIDTH = 800\nROBOT_START_X = 700\nROBOT_START_Y = 50\nSLEEP_TIME = 0.00001\nSLEEP_TIME_RESET = 0.2\n\nclass Environment(tk.Tk, object):\n def __init__(self):\n super(Environment, self).__init__()\n self.action_space = ['g', 'b'] # go, break\n self.num_actions = len(self.action_space)\n self.title('Environment')\n self.geometry('{0}x{1}'.format(WIDTH, HEIGHT))\n self._build_environment()\n\n def _build_environment(self):\n self.canvas = tk.Canvas(self, bg='white', height=HEIGHT, width=WIDTH)\n\n # create obstacle\n obstacle_center = np.array([20, 50])\n self.obstacle = self.canvas.create_rectangle(\n obstacle_center[0] - 10, obstacle_center[1] - 40,\n obstacle_center[0] + 10, obstacle_center[1] + 40,\n fill='black'\n )\n\n # create robot\n robot_center = np.array([ROBOT_START_X, ROBOT_START_Y])\n self.robot = self.canvas.create_polygon([\n robot_center[0] - 25, robot_center[1] + 10, robot_center[0] - 25, robot_center[1] - 10,\n robot_center[0] - 15, robot_center[1] - 10, robot_center[0] - 15, robot_center[1] - 25,\n robot_center[0] + 25, robot_center[1] - 25, robot_center[0] + 25, robot_center[1] + 25,\n robot_center[0] - 15, robot_center[1] + 25, robot_center[0] - 15, robot_center[1] + 10\n ], \n fill='blue'\n )\n\n # pack\n self.canvas.pack()\n\n def stop_robot(self):\n # change outline to show the robot slows down\n self.canvas.itemconfig(self.robot, outline='red')\n \n # slow down robot\n for i in range(50):\n self.canvas.move(self.robot, -1, 0)\n time.sleep(SLEEP_TIME * 10 * i)\n self.render()\n\n # change outline back again\n self.canvas.itemconfig(self.robot, outline='')\n self.render()\n time.sleep(0.2)\n\n def perform_action(self, action):\n stopped = False\n done = False\n reward = 0\n\n if action == 0: # drive\n self.canvas.move(self.robot, -1, 0)\n elif action == 1: # break\n # if you want to speed up the process comment the next line in and the function stop_robot out\n #self.canvas.move(self.robot, -50, 0) # move further because of stop distance\n self.stop_robot()\n stopped = True\n\n nextState = self.canvas.coords(self.robot)\n obstCoords = self.canvas.coords(self.obstacle)\n dist = nextState[0] - obstCoords[2]\n\n if stopped:\n if (dist >= 15 and dist <= 40): # if enough space to obstacle\n reward = 1\n done = True\n elif dist < 15: # if too close to obstacle\n reward = -1\n done = True\n else: # if too far away to obstacle\n reward = -1\n done = False\n elif nextState[0] <= obstCoords[2]: # if robot hits obstacle\n reward = -1\n done = True\n\n return dist, reward, done\n\n def reset(self):\n self.update()\n time.sleep(SLEEP_TIME_RESET)\n self.canvas.delete(self.robot)\n\n # create robot\n robot_center = np.array([ROBOT_START_X, ROBOT_START_Y])\n self.robot = self.canvas.create_polygon([\n robot_center[0] - 25, robot_center[1] + 10, robot_center[0] - 25, robot_center[1] - 10,\n robot_center[0] - 15, robot_center[1] - 10, robot_center[0] - 15, robot_center[1] - 25,\n robot_center[0] + 25, robot_center[1] - 25, robot_center[0] + 25, robot_center[1] + 25,\n robot_center[0] - 15, robot_center[1] + 25, robot_center[0] - 15, robot_center[1] + 10\n ], \n fill='blue'\n )\n\n robotCoords = self.canvas.coords(self.robot)\n obstCoords = self.canvas.coords(self.obstacle)\n dist = robotCoords[0] - obstCoords[2]\n\n return dist\n\n def render(self):\n time.sleep(SLEEP_TIME)\n self.update()",
"step-ids": [
4,
5,
6,
8,
10
]
}
|
[
4,
5,
6,
8,
10
] |
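A minimal episode loop for the Environment class above, using a random policy purely for illustration; a learning agent would replace random.choice with its own action selection:

import random

env = Environment()
for episode in range(5):
    dist = env.reset()
    done = False
    while not done:
        env.render()
        action = random.choice([0, 1])      # 0 = drive, 1 = break (as in action_space)
        dist, reward, done = env.perform_action(action)
    print('episode', episode, 'final distance', dist, 'reward', reward)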
from leapp.models import Model, fields
from leapp.topics import TransactionTopic
class TargetRepositoryBase(Model):
topic = TransactionTopic
repoid = fields.String()
class UsedTargetRepository(TargetRepositoryBase):
pass
class RHELTargetRepository(TargetRepositoryBase):
pass
class CustomTargetRepository(TargetRepositoryBase):
name = fields.Nullable(fields.String())
baseurl = fields.Nullable(fields.String())
enabled = fields.Boolean(default=True)
class TargetRepositories(Model):
topic = TransactionTopic
rhel_repos = fields.List(fields.Model(RHELTargetRepository))
custom_repos = fields.List(fields.Model(CustomTargetRepository), default=[])
class UsedTargetRepositories(Model):
topic = TransactionTopic
repos = fields.List(fields.Model(UsedTargetRepository))
class CustomTargetRepositoryFile(Model):
topic = TransactionTopic
file = fields.String()
|
normal
|
{
"blob_id": "47dc9212a1059cbca8ec6732deaa835fa9967fd8",
"index": 2990,
"step-1": "<mask token>\n\n\nclass RHELTargetRepository(TargetRepositoryBase):\n pass\n\n\nclass CustomTargetRepository(TargetRepositoryBase):\n name = fields.Nullable(fields.String())\n baseurl = fields.Nullable(fields.String())\n enabled = fields.Boolean(default=True)\n\n\nclass TargetRepositories(Model):\n topic = TransactionTopic\n rhel_repos = fields.List(fields.Model(RHELTargetRepository))\n custom_repos = fields.List(fields.Model(CustomTargetRepository), default=[]\n )\n\n\nclass UsedTargetRepositories(Model):\n topic = TransactionTopic\n repos = fields.List(fields.Model(UsedTargetRepository))\n\n\nclass CustomTargetRepositoryFile(Model):\n topic = TransactionTopic\n file = fields.String()\n",
"step-2": "<mask token>\n\n\nclass UsedTargetRepository(TargetRepositoryBase):\n pass\n\n\nclass RHELTargetRepository(TargetRepositoryBase):\n pass\n\n\nclass CustomTargetRepository(TargetRepositoryBase):\n name = fields.Nullable(fields.String())\n baseurl = fields.Nullable(fields.String())\n enabled = fields.Boolean(default=True)\n\n\nclass TargetRepositories(Model):\n topic = TransactionTopic\n rhel_repos = fields.List(fields.Model(RHELTargetRepository))\n custom_repos = fields.List(fields.Model(CustomTargetRepository), default=[]\n )\n\n\nclass UsedTargetRepositories(Model):\n topic = TransactionTopic\n repos = fields.List(fields.Model(UsedTargetRepository))\n\n\nclass CustomTargetRepositoryFile(Model):\n topic = TransactionTopic\n file = fields.String()\n",
"step-3": "<mask token>\n\n\nclass TargetRepositoryBase(Model):\n <mask token>\n <mask token>\n\n\nclass UsedTargetRepository(TargetRepositoryBase):\n pass\n\n\nclass RHELTargetRepository(TargetRepositoryBase):\n pass\n\n\nclass CustomTargetRepository(TargetRepositoryBase):\n name = fields.Nullable(fields.String())\n baseurl = fields.Nullable(fields.String())\n enabled = fields.Boolean(default=True)\n\n\nclass TargetRepositories(Model):\n topic = TransactionTopic\n rhel_repos = fields.List(fields.Model(RHELTargetRepository))\n custom_repos = fields.List(fields.Model(CustomTargetRepository), default=[]\n )\n\n\nclass UsedTargetRepositories(Model):\n topic = TransactionTopic\n repos = fields.List(fields.Model(UsedTargetRepository))\n\n\nclass CustomTargetRepositoryFile(Model):\n topic = TransactionTopic\n file = fields.String()\n",
"step-4": "<mask token>\n\n\nclass TargetRepositoryBase(Model):\n topic = TransactionTopic\n repoid = fields.String()\n\n\nclass UsedTargetRepository(TargetRepositoryBase):\n pass\n\n\nclass RHELTargetRepository(TargetRepositoryBase):\n pass\n\n\nclass CustomTargetRepository(TargetRepositoryBase):\n name = fields.Nullable(fields.String())\n baseurl = fields.Nullable(fields.String())\n enabled = fields.Boolean(default=True)\n\n\nclass TargetRepositories(Model):\n topic = TransactionTopic\n rhel_repos = fields.List(fields.Model(RHELTargetRepository))\n custom_repos = fields.List(fields.Model(CustomTargetRepository), default=[]\n )\n\n\nclass UsedTargetRepositories(Model):\n topic = TransactionTopic\n repos = fields.List(fields.Model(UsedTargetRepository))\n\n\nclass CustomTargetRepositoryFile(Model):\n topic = TransactionTopic\n file = fields.String()\n",
"step-5": "from leapp.models import Model, fields\nfrom leapp.topics import TransactionTopic\n\n\nclass TargetRepositoryBase(Model):\n topic = TransactionTopic\n repoid = fields.String()\n\n\nclass UsedTargetRepository(TargetRepositoryBase):\n pass\n\n\nclass RHELTargetRepository(TargetRepositoryBase):\n pass\n\n\nclass CustomTargetRepository(TargetRepositoryBase):\n name = fields.Nullable(fields.String())\n baseurl = fields.Nullable(fields.String())\n enabled = fields.Boolean(default=True)\n\n\nclass TargetRepositories(Model):\n topic = TransactionTopic\n rhel_repos = fields.List(fields.Model(RHELTargetRepository))\n custom_repos = fields.List(fields.Model(CustomTargetRepository), default=[])\n\n\nclass UsedTargetRepositories(Model):\n topic = TransactionTopic\n repos = fields.List(fields.Model(UsedTargetRepository))\n\n\nclass CustomTargetRepositoryFile(Model):\n topic = TransactionTopic\n file = fields.String()\n",
"step-ids": [
9,
10,
11,
12,
14
]
}
|
[
9,
10,
11,
12,
14
] |
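An illustrative instantiation of the models above; the repoids and URL are placeholders, and in a real leapp actor these messages would be emitted with produce() and read with consume():

custom = CustomTargetRepository(
    repoid='custom-extra-repo',
    name='Custom extra packages',
    baseurl='https://example.com/repo/$basearch/',
    enabled=True,
)
rhel = RHELTargetRepository(repoid='rhel-8-for-x86_64-baseos-rpms')
repos = TargetRepositories(rhel_repos=[rhel], custom_repos=[custom])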
<|reserved_special_token_0|>
<|reserved_special_token_1|>
with open('C:\\Users\\lenovo\\Desktop\\哈工大社会计算与信息检索研究中心同义词词林扩展版.txt') as f:
with open('convert.txt', 'a') as w:
for line in f:
data = line[8:-1].split()
for item in data:
tmp = data.copy()
tmp.remove(item)
tmp.insert(0, item)
w.writelines('\t'.join(tmp) + '\n')
<|reserved_special_token_1|>
# -*- coding:utf-8 -*-
# Normalize the Tongyici Cilin (synonym thesaurus) entries
with open('C:\\Users\\lenovo\\Desktop\\哈工大社会计算与信息检索研究中心同义词词林扩展版.txt') as f:
with open('convert.txt','a') as w:
for line in f:
data = line[8:-1].split()
for item in data:
tmp = data.copy()
tmp.remove(item)
tmp.insert(0,item)
w.writelines('\t'.join(tmp)+'\n')
|
flexible
|
{
"blob_id": "9109e649a90730df022df898a7760140275ad724",
"index": 4854,
"step-1": "<mask token>\n",
"step-2": "with open('C:\\\\Users\\\\lenovo\\\\Desktop\\\\哈工大社会计算与信息检索研究中心同义词词林扩展版.txt') as f:\n with open('convert.txt', 'a') as w:\n for line in f:\n data = line[8:-1].split()\n for item in data:\n tmp = data.copy()\n tmp.remove(item)\n tmp.insert(0, item)\n w.writelines('\\t'.join(tmp) + '\\n')\n",
"step-3": "# -*- coding:utf-8 -*- \r\n#实现同义词词林的规格化\r\n\r\n\r\nwith open('C:\\\\Users\\\\lenovo\\\\Desktop\\\\哈工大社会计算与信息检索研究中心同义词词林扩展版.txt') as f:\r\n with open('convert.txt','a') as w:\r\n for line in f:\r\n \r\n data = line[8:-1].split()\r\n for item in data:\r\n tmp = data.copy()\r\n tmp.remove(item)\r\n tmp.insert(0,item)\r\n w.writelines('\\t'.join(tmp)+'\\n')",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
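The conversion above rotates each word of a Tongyici Cilin entry to the front of its synonym group and writes one line per word. A compact sketch of that rotation, with the file names as placeholders and the encoding made explicit:

with open('cilin.txt', encoding='utf-8') as src, \
        open('convert.txt', 'w', encoding='utf-8') as out:
    for line in src:
        words = line[8:].split()             # drop the 8-character category code
        for i, word in enumerate(words):
            rotated = [word] + words[:i] + words[i + 1:]
            out.write('\t'.join(rotated) + '\n')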
from django.db import models
class Kit(models.Model):
name = models.CharField(max_length=100, null=True)
main_image_url = models.URLField(max_length=1000)
price = models.DecimalField(max_digits=10, decimal_places=2, default=0)
description = models.CharField(max_length=1000, null=True)
class Meta:
db_table = 'kits'
class KitSubImageUrl(models.Model):
image_url = models.URLField(max_length=1000)
kit = models.ForeignKey('kit.Kit', on_delete=models.CASCADE)
class Meta:
db_table = 'kit_sub_image_urls'
class KitLike(models.Model):
user = models.ForeignKey('user.User', on_delete=models.CASCADE)
kit = models.ForeignKey('kit.Kit', on_delete=models.CASCADE)
class Meta:
db_table = 'kit_likes'
|
normal
|
{
"blob_id": "ea2183530667437e086bc89f137e464dec6f363a",
"index": 1800,
"step-1": "<mask token>\n\n\nclass KitSubImageUrl(models.Model):\n image_url = models.URLField(max_length=1000)\n kit = models.ForeignKey('kit.Kit', on_delete=models.CASCADE)\n\n\n class Meta:\n db_table = 'kit_sub_image_urls'\n\n\nclass KitLike(models.Model):\n user = models.ForeignKey('user.User', on_delete=models.CASCADE)\n kit = models.ForeignKey('kit.Kit', on_delete=models.CASCADE)\n\n\n class Meta:\n db_table = 'kit_likes'\n",
"step-2": "<mask token>\n\n\nclass Kit(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n db_table = 'kits'\n\n\nclass KitSubImageUrl(models.Model):\n image_url = models.URLField(max_length=1000)\n kit = models.ForeignKey('kit.Kit', on_delete=models.CASCADE)\n\n\n class Meta:\n db_table = 'kit_sub_image_urls'\n\n\nclass KitLike(models.Model):\n user = models.ForeignKey('user.User', on_delete=models.CASCADE)\n kit = models.ForeignKey('kit.Kit', on_delete=models.CASCADE)\n\n\n class Meta:\n db_table = 'kit_likes'\n",
"step-3": "<mask token>\n\n\nclass Kit(models.Model):\n name = models.CharField(max_length=100, null=True)\n main_image_url = models.URLField(max_length=1000)\n price = models.DecimalField(max_digits=10, decimal_places=2, default=0)\n description = models.CharField(max_length=1000, null=True)\n\n\n class Meta:\n db_table = 'kits'\n\n\nclass KitSubImageUrl(models.Model):\n image_url = models.URLField(max_length=1000)\n kit = models.ForeignKey('kit.Kit', on_delete=models.CASCADE)\n\n\n class Meta:\n db_table = 'kit_sub_image_urls'\n\n\nclass KitLike(models.Model):\n user = models.ForeignKey('user.User', on_delete=models.CASCADE)\n kit = models.ForeignKey('kit.Kit', on_delete=models.CASCADE)\n\n\n class Meta:\n db_table = 'kit_likes'\n",
"step-4": "from django.db import models\n\n\nclass Kit(models.Model):\n name = models.CharField(max_length=100, null=True)\n main_image_url = models.URLField(max_length=1000)\n price = models.DecimalField(max_digits=10, decimal_places=2, default=0)\n description = models.CharField(max_length=1000, null=True)\n\n\n class Meta:\n db_table = 'kits'\n\n\nclass KitSubImageUrl(models.Model):\n image_url = models.URLField(max_length=1000)\n kit = models.ForeignKey('kit.Kit', on_delete=models.CASCADE)\n\n\n class Meta:\n db_table = 'kit_sub_image_urls'\n\n\nclass KitLike(models.Model):\n user = models.ForeignKey('user.User', on_delete=models.CASCADE)\n kit = models.ForeignKey('kit.Kit', on_delete=models.CASCADE)\n\n\n class Meta:\n db_table = 'kit_likes'\n",
"step-5": null,
"step-ids": [
4,
5,
6,
7
]
}
|
[
4,
5,
6,
7
] |
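An illustrative query against the models above; it assumes the referenced user.User model exists, migrations have been applied, and kitlike is Django's default reverse accessor name:

from django.db.models import Count

most_liked = Kit.objects.annotate(like_count=Count('kitlike')).order_by('-like_count').first()
if most_liked is not None:
    sub_image_urls = list(
        KitSubImageUrl.objects.filter(kit=most_liked).values_list('image_url', flat=True)
    )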
<|reserved_special_token_0|>
def config_graph():
paths = []
path = {}
path['input_dim'] = 4116
path['name'] = 'shared1'
path['computation'] = construct_path(path['name'], [512, 512],
batch_norm=False, dropout=True, dropout_rate=0.5, noise=False,
noise_std=0.16)
path['input'] = 'organic'
paths.append(path)
path = {}
path['name'] = 'aspects'
path['input'] = 'shared1'
path['input_dim'] = 512
path['computation'] = construct_path(path['name'], [11], batch_norm=
False, activation=None)
path['optimizer'] = tf.train.AdamOptimizer(name='optimizer',
learning_rate=0.0001, beta1=0.92, beta2=0.9999)
path['loss'] = loss_map('sigmoid')
path['predictor'] = sigmoid_predictor()
paths.append(path)
return paths
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def config_graph():
paths = []
path = {}
path['input_dim'] = 4116
path['name'] = 'shared1'
path['computation'] = construct_path(path['name'], [512, 512],
batch_norm=False, dropout=True, dropout_rate=0.5, noise=False,
noise_std=0.16)
path['input'] = 'organic'
paths.append(path)
path = {}
path['name'] = 'aspects'
path['input'] = 'shared1'
path['input_dim'] = 512
path['computation'] = construct_path(path['name'], [11], batch_norm=
False, activation=None)
path['optimizer'] = tf.train.AdamOptimizer(name='optimizer',
learning_rate=0.0001, beta1=0.92, beta2=0.9999)
path['loss'] = loss_map('sigmoid')
path['predictor'] = sigmoid_predictor()
paths.append(path)
return paths
<|reserved_special_token_0|>
for f in range(0, folds):
fold_start = f * fold_size
fold_end = min((f + 1) * fold_size, dataset_size)
print(fold_start, fold_end)
org_dict = fold_data_dict(org_dict_full, fold_start, fold_end)
datasets = []
dataset = {}
dataset['name'] = 'organic'
dataset['batch_size'] = 10
dataset['features'] = org_dict['train_vecs']
dataset['type'] = tf.float32
dataset['tasks'] = [{'name': 'aspects', 'features': org_dict[
'encoded_train_labels'], 'type': tf.float32}]
datasets.append(dataset)
paths = config_graph()
params = {}
params['train_iter'] = 4001
model = TfMultiPathClassifier(datasets, paths, params)
model.train()
model.save()
y = model.get_prediciton('aspects', org_dict['test_vecs'])
x = model.get_prediciton('aspects', org_dict['train_vecs'])
multi_label_metrics(x, org_dict['train_labels'], org_dict[
'encoded_train_labels'], org_dict['labeling'], org_dict['train_data'])
_, f1 = multi_label_metrics(y, org_dict['test_labels'], org_dict[
'encoded_test_labels'], org_dict['labeling'], org_dict['test_data'],
mute=True)
avg_f1 += f1
<|reserved_special_token_0|>
print(
"""
--------------------------------------------------------------------------
Average F1 score:"""
, avg_f1)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def config_graph():
paths = []
path = {}
path['input_dim'] = 4116
path['name'] = 'shared1'
path['computation'] = construct_path(path['name'], [512, 512],
batch_norm=False, dropout=True, dropout_rate=0.5, noise=False,
noise_std=0.16)
path['input'] = 'organic'
paths.append(path)
path = {}
path['name'] = 'aspects'
path['input'] = 'shared1'
path['input_dim'] = 512
path['computation'] = construct_path(path['name'], [11], batch_norm=
False, activation=None)
path['optimizer'] = tf.train.AdamOptimizer(name='optimizer',
learning_rate=0.0001, beta1=0.92, beta2=0.9999)
path['loss'] = loss_map('sigmoid')
path['predictor'] = sigmoid_predictor()
paths.append(path)
return paths
org_dict_full = prep_organic_aspects()
dataset_size = len(org_dict_full['train_data'])
folds = 10
fold_size = ceil(dataset_size / folds)
avg_f1 = 0
for f in range(0, folds):
fold_start = f * fold_size
fold_end = min((f + 1) * fold_size, dataset_size)
print(fold_start, fold_end)
org_dict = fold_data_dict(org_dict_full, fold_start, fold_end)
datasets = []
dataset = {}
dataset['name'] = 'organic'
dataset['batch_size'] = 10
dataset['features'] = org_dict['train_vecs']
dataset['type'] = tf.float32
dataset['tasks'] = [{'name': 'aspects', 'features': org_dict[
'encoded_train_labels'], 'type': tf.float32}]
datasets.append(dataset)
paths = config_graph()
params = {}
params['train_iter'] = 4001
model = TfMultiPathClassifier(datasets, paths, params)
model.train()
model.save()
y = model.get_prediciton('aspects', org_dict['test_vecs'])
x = model.get_prediciton('aspects', org_dict['train_vecs'])
multi_label_metrics(x, org_dict['train_labels'], org_dict[
'encoded_train_labels'], org_dict['labeling'], org_dict['train_data'])
_, f1 = multi_label_metrics(y, org_dict['test_labels'], org_dict[
'encoded_test_labels'], org_dict['labeling'], org_dict['test_data'],
mute=True)
avg_f1 += f1
avg_f1 = avg_f1 / folds
print(
"""
--------------------------------------------------------------------------
Average F1 score:"""
, avg_f1)
<|reserved_special_token_1|>
from utils import *
from Dataset.input_pipe import *
from Learning.tf_multipath_classifier import *
def config_graph():
paths = []
path = {}
path['input_dim'] = 4116
path['name'] = 'shared1'
path['computation'] = construct_path(path['name'], [512, 512],
batch_norm=False, dropout=True, dropout_rate=0.5, noise=False,
noise_std=0.16)
path['input'] = 'organic'
paths.append(path)
path = {}
path['name'] = 'aspects'
path['input'] = 'shared1'
path['input_dim'] = 512
path['computation'] = construct_path(path['name'], [11], batch_norm=
False, activation=None)
path['optimizer'] = tf.train.AdamOptimizer(name='optimizer',
learning_rate=0.0001, beta1=0.92, beta2=0.9999)
path['loss'] = loss_map('sigmoid')
path['predictor'] = sigmoid_predictor()
paths.append(path)
return paths
org_dict_full = prep_organic_aspects()
dataset_size = len(org_dict_full['train_data'])
folds = 10
fold_size = ceil(dataset_size / folds)
avg_f1 = 0
for f in range(0, folds):
fold_start = f * fold_size
fold_end = min((f + 1) * fold_size, dataset_size)
print(fold_start, fold_end)
org_dict = fold_data_dict(org_dict_full, fold_start, fold_end)
datasets = []
dataset = {}
dataset['name'] = 'organic'
dataset['batch_size'] = 10
dataset['features'] = org_dict['train_vecs']
dataset['type'] = tf.float32
dataset['tasks'] = [{'name': 'aspects', 'features': org_dict[
'encoded_train_labels'], 'type': tf.float32}]
datasets.append(dataset)
paths = config_graph()
params = {}
params['train_iter'] = 4001
model = TfMultiPathClassifier(datasets, paths, params)
model.train()
model.save()
y = model.get_prediciton('aspects', org_dict['test_vecs'])
x = model.get_prediciton('aspects', org_dict['train_vecs'])
multi_label_metrics(x, org_dict['train_labels'], org_dict[
'encoded_train_labels'], org_dict['labeling'], org_dict['train_data'])
_, f1 = multi_label_metrics(y, org_dict['test_labels'], org_dict[
'encoded_test_labels'], org_dict['labeling'], org_dict['test_data'],
mute=True)
avg_f1 += f1
avg_f1 = avg_f1 / folds
print(
"""
--------------------------------------------------------------------------
Average F1 score:"""
, avg_f1)
<|reserved_special_token_1|>
from utils import *
from Dataset.input_pipe import *
from Learning.tf_multipath_classifier import *
def config_graph():
paths = []
path = {}
path['input_dim'] = 4116
path['name'] = 'shared1'
path['computation'] = construct_path(path['name'], [512, 512], batch_norm=False, dropout=True, dropout_rate=0.5, noise=False, noise_std=0.16)
path['input'] = 'organic'
paths.append(path)
path = {}
path['name'] = 'aspects'
path['input'] = 'shared1'
path['input_dim'] = 512
path['computation'] = construct_path(path['name'], [11], batch_norm=False, activation=None)
path['optimizer'] = tf.train.AdamOptimizer(name='optimizer', learning_rate=0.0001 , beta1=0.92 , beta2=0.9999)
path['loss'] = loss_map('sigmoid')
path['predictor'] = sigmoid_predictor()
paths.append(path)
return paths
org_dict_full = prep_organic_aspects()
dataset_size = len(org_dict_full['train_data'])
folds = 10
fold_size= ceil(dataset_size / folds)
avg_f1 = 0
for f in range(0,folds):
fold_start = f * fold_size
fold_end = min((f+1) * fold_size, dataset_size )
print(fold_start, fold_end)
org_dict = fold_data_dict(org_dict_full, fold_start, fold_end )
datasets = []
dataset = {}
dataset['name'] = 'organic'
# dataset['holdout'] = 50
dataset['batch_size'] = 10
dataset['features'] = org_dict['train_vecs']
dataset['type'] = tf.float32
dataset['tasks'] = [{'name' : 'aspects', 'features' : org_dict['encoded_train_labels'], 'type': tf.float32}]
datasets.append(dataset)
paths = config_graph()
params = {}
params['train_iter'] = 4001
model = TfMultiPathClassifier(datasets, paths, params)
model.train()
model.save()
y = model.get_prediciton('aspects', org_dict['test_vecs'])
x = model.get_prediciton('aspects', org_dict['train_vecs'])
multi_label_metrics(x, org_dict['train_labels'], org_dict['encoded_train_labels'],
org_dict['labeling'], org_dict['train_data'] )
_, f1 = multi_label_metrics(y, org_dict['test_labels'], org_dict['encoded_test_labels'],
org_dict['labeling'], org_dict['test_data'], mute=True )
avg_f1 +=f1
avg_f1 = avg_f1 / folds
print('\n--------------------------------------------------------------------------\nAverage F1 score:', avg_f1)
|
flexible
|
{
"blob_id": "8039430f1b65cc76f9a78b1094f110de29f0f965",
"index": 4885,
"step-1": "<mask token>\n\n\ndef config_graph():\n paths = []\n path = {}\n path['input_dim'] = 4116\n path['name'] = 'shared1'\n path['computation'] = construct_path(path['name'], [512, 512],\n batch_norm=False, dropout=True, dropout_rate=0.5, noise=False,\n noise_std=0.16)\n path['input'] = 'organic'\n paths.append(path)\n path = {}\n path['name'] = 'aspects'\n path['input'] = 'shared1'\n path['input_dim'] = 512\n path['computation'] = construct_path(path['name'], [11], batch_norm=\n False, activation=None)\n path['optimizer'] = tf.train.AdamOptimizer(name='optimizer',\n learning_rate=0.0001, beta1=0.92, beta2=0.9999)\n path['loss'] = loss_map('sigmoid')\n path['predictor'] = sigmoid_predictor()\n paths.append(path)\n return paths\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef config_graph():\n paths = []\n path = {}\n path['input_dim'] = 4116\n path['name'] = 'shared1'\n path['computation'] = construct_path(path['name'], [512, 512],\n batch_norm=False, dropout=True, dropout_rate=0.5, noise=False,\n noise_std=0.16)\n path['input'] = 'organic'\n paths.append(path)\n path = {}\n path['name'] = 'aspects'\n path['input'] = 'shared1'\n path['input_dim'] = 512\n path['computation'] = construct_path(path['name'], [11], batch_norm=\n False, activation=None)\n path['optimizer'] = tf.train.AdamOptimizer(name='optimizer',\n learning_rate=0.0001, beta1=0.92, beta2=0.9999)\n path['loss'] = loss_map('sigmoid')\n path['predictor'] = sigmoid_predictor()\n paths.append(path)\n return paths\n\n\n<mask token>\nfor f in range(0, folds):\n fold_start = f * fold_size\n fold_end = min((f + 1) * fold_size, dataset_size)\n print(fold_start, fold_end)\n org_dict = fold_data_dict(org_dict_full, fold_start, fold_end)\n datasets = []\n dataset = {}\n dataset['name'] = 'organic'\n dataset['batch_size'] = 10\n dataset['features'] = org_dict['train_vecs']\n dataset['type'] = tf.float32\n dataset['tasks'] = [{'name': 'aspects', 'features': org_dict[\n 'encoded_train_labels'], 'type': tf.float32}]\n datasets.append(dataset)\n paths = config_graph()\n params = {}\n params['train_iter'] = 4001\n model = TfMultiPathClassifier(datasets, paths, params)\n model.train()\n model.save()\n y = model.get_prediciton('aspects', org_dict['test_vecs'])\n x = model.get_prediciton('aspects', org_dict['train_vecs'])\n multi_label_metrics(x, org_dict['train_labels'], org_dict[\n 'encoded_train_labels'], org_dict['labeling'], org_dict['train_data'])\n _, f1 = multi_label_metrics(y, org_dict['test_labels'], org_dict[\n 'encoded_test_labels'], org_dict['labeling'], org_dict['test_data'],\n mute=True)\n avg_f1 += f1\n<mask token>\nprint(\n \"\"\"\n--------------------------------------------------------------------------\nAverage F1 score:\"\"\"\n , avg_f1)\n",
"step-3": "<mask token>\n\n\ndef config_graph():\n paths = []\n path = {}\n path['input_dim'] = 4116\n path['name'] = 'shared1'\n path['computation'] = construct_path(path['name'], [512, 512],\n batch_norm=False, dropout=True, dropout_rate=0.5, noise=False,\n noise_std=0.16)\n path['input'] = 'organic'\n paths.append(path)\n path = {}\n path['name'] = 'aspects'\n path['input'] = 'shared1'\n path['input_dim'] = 512\n path['computation'] = construct_path(path['name'], [11], batch_norm=\n False, activation=None)\n path['optimizer'] = tf.train.AdamOptimizer(name='optimizer',\n learning_rate=0.0001, beta1=0.92, beta2=0.9999)\n path['loss'] = loss_map('sigmoid')\n path['predictor'] = sigmoid_predictor()\n paths.append(path)\n return paths\n\n\norg_dict_full = prep_organic_aspects()\ndataset_size = len(org_dict_full['train_data'])\nfolds = 10\nfold_size = ceil(dataset_size / folds)\navg_f1 = 0\nfor f in range(0, folds):\n fold_start = f * fold_size\n fold_end = min((f + 1) * fold_size, dataset_size)\n print(fold_start, fold_end)\n org_dict = fold_data_dict(org_dict_full, fold_start, fold_end)\n datasets = []\n dataset = {}\n dataset['name'] = 'organic'\n dataset['batch_size'] = 10\n dataset['features'] = org_dict['train_vecs']\n dataset['type'] = tf.float32\n dataset['tasks'] = [{'name': 'aspects', 'features': org_dict[\n 'encoded_train_labels'], 'type': tf.float32}]\n datasets.append(dataset)\n paths = config_graph()\n params = {}\n params['train_iter'] = 4001\n model = TfMultiPathClassifier(datasets, paths, params)\n model.train()\n model.save()\n y = model.get_prediciton('aspects', org_dict['test_vecs'])\n x = model.get_prediciton('aspects', org_dict['train_vecs'])\n multi_label_metrics(x, org_dict['train_labels'], org_dict[\n 'encoded_train_labels'], org_dict['labeling'], org_dict['train_data'])\n _, f1 = multi_label_metrics(y, org_dict['test_labels'], org_dict[\n 'encoded_test_labels'], org_dict['labeling'], org_dict['test_data'],\n mute=True)\n avg_f1 += f1\navg_f1 = avg_f1 / folds\nprint(\n \"\"\"\n--------------------------------------------------------------------------\nAverage F1 score:\"\"\"\n , avg_f1)\n",
"step-4": "from utils import *\nfrom Dataset.input_pipe import *\nfrom Learning.tf_multipath_classifier import *\n\n\ndef config_graph():\n paths = []\n path = {}\n path['input_dim'] = 4116\n path['name'] = 'shared1'\n path['computation'] = construct_path(path['name'], [512, 512],\n batch_norm=False, dropout=True, dropout_rate=0.5, noise=False,\n noise_std=0.16)\n path['input'] = 'organic'\n paths.append(path)\n path = {}\n path['name'] = 'aspects'\n path['input'] = 'shared1'\n path['input_dim'] = 512\n path['computation'] = construct_path(path['name'], [11], batch_norm=\n False, activation=None)\n path['optimizer'] = tf.train.AdamOptimizer(name='optimizer',\n learning_rate=0.0001, beta1=0.92, beta2=0.9999)\n path['loss'] = loss_map('sigmoid')\n path['predictor'] = sigmoid_predictor()\n paths.append(path)\n return paths\n\n\norg_dict_full = prep_organic_aspects()\ndataset_size = len(org_dict_full['train_data'])\nfolds = 10\nfold_size = ceil(dataset_size / folds)\navg_f1 = 0\nfor f in range(0, folds):\n fold_start = f * fold_size\n fold_end = min((f + 1) * fold_size, dataset_size)\n print(fold_start, fold_end)\n org_dict = fold_data_dict(org_dict_full, fold_start, fold_end)\n datasets = []\n dataset = {}\n dataset['name'] = 'organic'\n dataset['batch_size'] = 10\n dataset['features'] = org_dict['train_vecs']\n dataset['type'] = tf.float32\n dataset['tasks'] = [{'name': 'aspects', 'features': org_dict[\n 'encoded_train_labels'], 'type': tf.float32}]\n datasets.append(dataset)\n paths = config_graph()\n params = {}\n params['train_iter'] = 4001\n model = TfMultiPathClassifier(datasets, paths, params)\n model.train()\n model.save()\n y = model.get_prediciton('aspects', org_dict['test_vecs'])\n x = model.get_prediciton('aspects', org_dict['train_vecs'])\n multi_label_metrics(x, org_dict['train_labels'], org_dict[\n 'encoded_train_labels'], org_dict['labeling'], org_dict['train_data'])\n _, f1 = multi_label_metrics(y, org_dict['test_labels'], org_dict[\n 'encoded_test_labels'], org_dict['labeling'], org_dict['test_data'],\n mute=True)\n avg_f1 += f1\navg_f1 = avg_f1 / folds\nprint(\n \"\"\"\n--------------------------------------------------------------------------\nAverage F1 score:\"\"\"\n , avg_f1)\n",
"step-5": "from utils import *\nfrom Dataset.input_pipe import *\nfrom Learning.tf_multipath_classifier import *\n\n\ndef config_graph():\n\tpaths = []\n\n\tpath = {}\n\tpath['input_dim'] = 4116\n\tpath['name'] = 'shared1'\n\tpath['computation'] = construct_path(path['name'], [512, 512], batch_norm=False, dropout=True, dropout_rate=0.5, noise=False, noise_std=0.16)\n\tpath['input'] = 'organic'\n\tpaths.append(path)\n\n\tpath = {}\n\tpath['name'] = 'aspects'\n\tpath['input'] = 'shared1'\n\tpath['input_dim'] = 512\n\tpath['computation'] = construct_path(path['name'], [11], batch_norm=False, activation=None)\n\tpath['optimizer'] = tf.train.AdamOptimizer(name='optimizer', learning_rate=0.0001 , beta1=0.92 , beta2=0.9999)\n\tpath['loss'] = loss_map('sigmoid')\n\tpath['predictor'] = sigmoid_predictor()\n\tpaths.append(path)\n\n\treturn paths\n\n\norg_dict_full = prep_organic_aspects()\ndataset_size = len(org_dict_full['train_data'])\n\nfolds = 10\nfold_size= ceil(dataset_size / folds)\navg_f1 = 0\nfor f in range(0,folds):\n\tfold_start = f * fold_size\n\tfold_end = min((f+1) * fold_size, dataset_size )\n\tprint(fold_start, fold_end)\n\torg_dict = fold_data_dict(org_dict_full, fold_start, fold_end )\n\n\tdatasets = []\n\tdataset = {}\n\tdataset['name'] = 'organic'\n\t# dataset['holdout'] = 50\n\tdataset['batch_size'] = 10\n\tdataset['features'] = org_dict['train_vecs']\n\tdataset['type'] = tf.float32\n\tdataset['tasks'] = [{'name' : 'aspects', 'features' : org_dict['encoded_train_labels'], 'type': tf.float32}]\n\tdatasets.append(dataset)\n\n\tpaths = config_graph()\n\tparams = {}\n\tparams['train_iter'] = 4001\n\n\tmodel = TfMultiPathClassifier(datasets, paths, params)\n\n\tmodel.train()\n\tmodel.save()\n\n\ty = model.get_prediciton('aspects', org_dict['test_vecs'])\n\tx = model.get_prediciton('aspects', org_dict['train_vecs'])\n\n\tmulti_label_metrics(x, org_dict['train_labels'], org_dict['encoded_train_labels'],\n\t\t\t\t\t\t\t\t\t\t\torg_dict['labeling'], org_dict['train_data'] )\n\n\t_, f1 = multi_label_metrics(y, org_dict['test_labels'], org_dict['encoded_test_labels'],\n\t\t\t\t\t\t\t\t\t\t\torg_dict['labeling'], org_dict['test_data'], mute=True )\n\tavg_f1 +=f1\n\navg_f1 = avg_f1 / folds\nprint('\\n--------------------------------------------------------------------------\\nAverage F1 score:', avg_f1)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
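The cross-validation loop above slices the dataset into contiguous folds; the boundary arithmetic in isolation looks like this (dataset_size is a made-up number, and fold_data_dict plus the other helpers come from the project's own utils, so they are not reproduced):

from math import ceil

dataset_size = 1005                      # hypothetical
folds = 10
fold_size = ceil(dataset_size / folds)   # 101
for f in range(folds):
    fold_start = f * fold_size
    fold_end = min((f + 1) * fold_size, dataset_size)
    print(f, fold_start, fold_end)       # fold f holds out [fold_start, fold_end) for testing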
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import stevedore
from keystoneauth1 import exceptions
PLUGIN_NAMESPACE = 'keystoneauth1.plugin'
__all__ = ('get_available_plugin_names',
'get_available_plugin_loaders',
'get_plugin_loader',
'get_plugin_options',
'BaseLoader',
'PLUGIN_NAMESPACE')
def _auth_plugin_available(ext):
"""Read the value of available for whether to load this plugin."""
return ext.obj.available
def get_available_plugin_names():
"""Get the names of all the plugins that are available on the system.
This is particularly useful for help and error text to prompt a user for
example what plugins they may specify.
:returns: A list of names.
:rtype: frozenset
"""
mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,
check_func=_auth_plugin_available,
invoke_on_load=True,
propagate_map_exceptions=True)
return frozenset(mgr.names())
def get_available_plugin_loaders():
"""Retrieve all the plugin classes available on the system.
:returns: A dict with plugin entrypoint name as the key and the plugin
loader as the value.
:rtype: dict
"""
mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,
check_func=_auth_plugin_available,
invoke_on_load=True,
propagate_map_exceptions=True)
return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))
def get_plugin_loader(name):
"""Retrieve a plugin class by its entrypoint name.
:param str name: The name of the object to get.
:returns: An auth plugin class.
:rtype: :py:class:`keystoneauth1.loading.BaseLoader`
:raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:
if a plugin cannot be created.
"""
try:
mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,
invoke_on_load=True,
name=name)
except RuntimeError:
raise exceptions.NoMatchingPlugin(name)
return mgr.driver
def get_plugin_options(name):
"""Get the options for a specific plugin.
This will be the list of options that is registered and loaded by the
specified plugin.
:returns: A list of :py:class:`keystoneauth1.loading.Opt` options.
:raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:
if a plugin cannot be created.
"""
return get_plugin_loader(name).get_options()
class BaseLoader(metaclass=abc.ABCMeta):
@property
def plugin_class(self):
raise NotImplementedError()
def create_plugin(self, **kwargs):
"""Create a plugin from the options available for the loader.
Given the options that were specified by the loader create an
appropriate plugin. You can override this function in your loader.
This used to be specified by providing the plugin_class property and
this is still supported, however specifying a property didn't let you
choose a plugin type based upon the options that were presented.
Override this function if you wish to return different plugins based on
the options presented, otherwise you can simply provide the
plugin_class property.
Added 2.9
"""
return self.plugin_class(**kwargs)
@abc.abstractmethod
def get_options(self):
"""Return the list of parameters associated with the auth plugin.
This list may be used to generate CLI or config arguments.
:returns: A list of Param objects describing available plugin
parameters.
:rtype: list
"""
return []
@property
def available(self):
"""Return if the plugin is available for loading.
If a plugin is missing dependencies or for some other reason should not
be available to the current system it should override this property and
return False to exclude itself from the plugin list.
:rtype: bool
"""
return True
def load_from_options(self, **kwargs):
"""Create a plugin from the arguments retrieved from get_options.
A client can override this function to do argument validation or to
handle differences between the registered options and what is required
to create the plugin.
"""
missing_required = [o for o in self.get_options()
if o.required and kwargs.get(o.dest) is None]
if missing_required:
raise exceptions.MissingRequiredOptions(missing_required)
return self.create_plugin(**kwargs)
def load_from_options_getter(self, getter, **kwargs):
"""Load a plugin from getter function that returns appropriate values.
To handle cases other than the provided CONF and CLI loading you can
specify a custom loader function that will be queried for the option
value.
The getter is a function that takes a
:py:class:`keystoneauth1.loading.Opt` and returns a value to load with.
:param getter: A function that returns a value for the given opt.
:type getter: callable
:returns: An authentication Plugin.
:rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`
"""
for opt in (o for o in self.get_options() if o.dest not in kwargs):
val = getter(opt)
if val is not None:
val = opt.type(val)
kwargs[opt.dest] = val
return self.load_from_options(**kwargs)
|
normal
|
{
"blob_id": "53127de883fb5da3214d13904664566269becba6",
"index": 3570,
"step-1": "<mask token>\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n <mask token>\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n <mask token>\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n <mask token>\n",
"step-2": "<mask token>\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\n<mask token>\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True, name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. 
You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n return self.load_from_options(**kwargs)\n",
"step-3": "<mask token>\nPLUGIN_NAMESPACE = 'keystoneauth1.plugin'\n__all__ = ('get_available_plugin_names', 'get_available_plugin_loaders',\n 'get_plugin_loader', 'get_plugin_options', 'BaseLoader', 'PLUGIN_NAMESPACE'\n )\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\ndef get_available_plugin_names():\n \"\"\"Get the names of all the plugins that are available on the system.\n\n This is particularly useful for help and error text to prompt a user for\n example what plugins they may specify.\n\n :returns: A list of names.\n :rtype: frozenset\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return frozenset(mgr.names())\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True, name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. 
You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n return self.load_from_options(**kwargs)\n",
"step-4": "import abc\nimport stevedore\nfrom keystoneauth1 import exceptions\nPLUGIN_NAMESPACE = 'keystoneauth1.plugin'\n__all__ = ('get_available_plugin_names', 'get_available_plugin_loaders',\n 'get_plugin_loader', 'get_plugin_options', 'BaseLoader', 'PLUGIN_NAMESPACE'\n )\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\ndef get_available_plugin_names():\n \"\"\"Get the names of all the plugins that are available on the system.\n\n This is particularly useful for help and error text to prompt a user for\n example what plugins they may specify.\n\n :returns: A list of names.\n :rtype: frozenset\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return frozenset(mgr.names())\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True, name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. 
You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n return self.load_from_options(**kwargs)\n",
"step-5": "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport abc\n\nimport stevedore\n\nfrom keystoneauth1 import exceptions\n\nPLUGIN_NAMESPACE = 'keystoneauth1.plugin'\n\n\n__all__ = ('get_available_plugin_names',\n 'get_available_plugin_loaders',\n 'get_plugin_loader',\n 'get_plugin_options',\n 'BaseLoader',\n 'PLUGIN_NAMESPACE')\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\ndef get_available_plugin_names():\n \"\"\"Get the names of all the plugins that are available on the system.\n\n This is particularly useful for help and error text to prompt a user for\n example what plugins they may specify.\n\n :returns: A list of names.\n :rtype: frozenset\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available,\n invoke_on_load=True,\n propagate_map_exceptions=True)\n return frozenset(mgr.names())\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available,\n invoke_on_load=True,\n propagate_map_exceptions=True)\n\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True,\n name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. 
You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options()\n if o.required and kwargs.get(o.dest) is None]\n\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n\n return self.load_from_options(**kwargs)\n",
"step-ids": [
4,
11,
13,
14,
15
]
}
|
[
4,
11,
13,
14,
15
] |
# -*- coding: utf-8 -*-
"""
======================
@author : Zhang Xu
@time : 2021/9/8:16:29
@email : [email protected]
@content : tensorflow subclassing reimplementation of NPA
======================
"""
import tensorflow as tf
from tensorflow.keras import *
from tensorflow.keras.layers import *
from keras import backend as K
npratio = 4
MAX_SENT_LENGTH = 30 # number of words in one news article
MAX_SENTS = 50 # number of news articles clicked by one user
# news encoder
# input: user id, the information of one news article
# output: news representation
class NewsEncoder(tf.keras.Model):
def __init__(self):
super(NewsEncoder, self).__init__(name='NewsEncoder')
        # user_id part
self.userid_input_layer = Input()
self.userid_embedding_layer = Embedding()
self.userid_dense_layer = Dense()
self.userid_flatten_layer = Flatten()
        # news part
self.news_input_layer = Input()
self.news_embedding_layer = Embedding()
self.news_conv_layer = Conv1D()
self.news_dropout_layer_1 = Dropout(0.2)
self.news_dropout_layer_2 = Dropout(0.2)
        # personalized attention part
self.pa_dense_layer = Dense()
self.pa_2_1_dot_layer = Dot()
self.pa_softmax_layer = Activation('softmax')
self.pa_1_1_dot_layer = Dot()
def call(self, inputs):
        '''Multiple inputs: user_id and news_input'''
        '''Takes a single user's id and the information of one news article'''
user_id, news_input = inputs[0], inputs[1]
# qw
x1 = self.userid_input_layer(user_id)
x1 = self.userid_embedding_layer(x1)
x1 = self.userid_dense_layer(x1)
qw = self.userid_flatten_layer(x1)
# news representation
x2 = self.news_input_layer(news_input)
x2 = self.news_embedding_layer(x2)
x2 = self.news_dropout_layer_1(x2)
x2 = self.news_conv_layer(x2)
x2 = self.news_dropout_layer_2(x2)
# personalized attention
qw = self.pa_dense_layer(qw)
attention_a = self.pa_2_1_dot_layer([x2, qw])
attention_weight = self.pa_softmax_layer(attention_a)
news_rep = self.pa_1_1_dot_layer([x2, attention_weight])
return news_rep
# NPA
# input: a user id, all of that user's clicked news (N articles), and candidate news (K articles)
# output: a predicted click probability for each of the K candidate news articles
class NPA(tf.keras.Model):
def __init__(self):
super(NPA, self).__init__(name='NPA')
        # user id part
self.userid_input_layer = Input()
self.userid_embedding_layer = Embedding()
self.userid_dense_layer = Dense()
self.userid_flatten_layer = Flatten()
        # clicked news part
self.clickednews_input_layer = [Input((MAX_SENT_LENGTH,), dtype='int32') for _ in range(MAX_SENTS)]
self.clickednews_encoder = [NewsEncoder() for _ in range(MAX_SENTS)]
self.clickednews_dense_layer = Dense()
self.clickednews_2_1_dot_layer = Dot((2, 1))
self.clickednews_softmax_layer = Activation('softmax')
self.clickednews_1_1_dot_layer = Dot((1, 1))
        # candidate news part
self.candidatenews_input_layer = [Input((MAX_SENT_LENGTH,), dtype='int32') for _ in range(1 + npratio)]
self.candidatenews_encoder = [NewsEncoder() for _ in range(1 + npratio)]
# click prediction
self.cp_dot_layer = dot()
self.cp_concatenate = concatenate()
self.cp_activation_layer = Activation('softmax')
def call(self, inputs):
user_id, clicked_news, candidate_news = inputs[0], inputs[1], inputs[2]
# qd
x1 = self.userid_input_layer(user_id)
x1 = self.userid_embedding_layer(x1)
x1 = self.userid_dense_layer(x1)
qd = self.userid_flatten_layer(x1)
# clicked news
clicked_news_vec = [0]*MAX_SENTS
for i in range(len(clicked_news)):
xx = self.clickednews_input_layer[i](clicked_news[i])
clicked_news_vec[i] = self.clickednews_encoder[i]([user_id, xx])
clicked_news_rep = concatenate([Lambda(lambda x: K.expand_dims(x, axis=1))(news) for news in clicked_news_vec], axis=1)
        # personalized attention between qd and clicked_news_rep
news_temp_dense = self.clickednews_dense_layer(qd)
attention_news = self.clickednews_2_1_dot_layer([clicked_news_rep, news_temp_dense])
attention_news_weight = self.clickednews_softmax_layer(attention_news)
user_rep = self.clickednews_1_1_dot_layer([clicked_news_rep, attention_news_weight])
# candidate news
candidate_news_vec = [0]*(1+npratio)
for i in range(len(candidate_news)):
xx = self.candidatenews_input_layer[i](candidate_news[i])
candidate_news_vec[i] = self.candidatenews_encoder[i]([user_id, xx])
# click prediction
        # dot product of each candidate news representation with the user representation, then softmax
logits = [self.cp_dot_layer([user_rep, candidate_news], axes=-1) for candidate_news in candidate_news_vec]
logits = self.cp_activation_layer(self.cp_concatenate(logits))
return logits
|
normal
|
{
"blob_id": "f3789d70f784345881f705fc809c49ad4e3526bc",
"index": 1287,
"step-1": "<mask token>\n\n\nclass NewsEncoder(tf.keras.Model):\n\n def __init__(self):\n super(NewsEncoder, self).__init__(name='NewsEncoder')\n self.userid_input_layer = Input()\n self.userid_embedding_layer = Embedding()\n self.userid_dense_layer = Dense()\n self.userid_flatten_layer = Flatten()\n self.news_input_layer = Input()\n self.news_embedding_layer = Embedding()\n self.news_conv_layer = Conv1D()\n self.news_dropout_layer_1 = Dropout(0.2)\n self.news_dropout_layer_2 = Dropout(0.2)\n self.pa_dense_layer = Dense()\n self.pa_2_1_dot_layer = Dot()\n self.pa_softmax_layer = Activation('softmax')\n self.pa_1_1_dot_layer = Dot()\n <mask token>\n\n\nclass NPA(tf.keras.Model):\n\n def __init__(self):\n super(NPA, self).__init__(name='NPA')\n self.userid_input_layer = Input()\n self.userid_embedding_layer = Embedding()\n self.userid_dense_layer = Dense()\n self.userid_flatten_layer = Flatten()\n self.clickednews_input_layer = [Input((MAX_SENT_LENGTH,), dtype=\n 'int32') for _ in range(MAX_SENTS)]\n self.clickednews_encoder = [NewsEncoder() for _ in range(MAX_SENTS)]\n self.clickednews_dense_layer = Dense()\n self.clickednews_2_1_dot_layer = Dot((2, 1))\n self.clickednews_softmax_layer = Activation('softmax')\n self.clickednews_1_1_dot_layer = Dot((1, 1))\n self.candidatenews_input_layer = [Input((MAX_SENT_LENGTH,), dtype=\n 'int32') for _ in range(1 + npratio)]\n self.candidatenews_encoder = [NewsEncoder() for _ in range(1 + npratio)\n ]\n self.cp_dot_layer = dot()\n self.cp_concatenate = concatenate()\n self.cp_activation_layer = Activation('softmax')\n\n def call(self, inputs):\n user_id, clicked_news, candidate_news = inputs[0], inputs[1], inputs[2]\n x1 = self.userid_input_layer(user_id)\n x1 = self.userid_embedding_layer(x1)\n x1 = self.userid_dense_layer(x1)\n qd = self.userid_flatten_layer(x1)\n clicked_news_vec = [0] * MAX_SENTS\n for i in range(len(clicked_news)):\n xx = self.clickednews_input_layer[i](clicked_news[i])\n clicked_news_vec[i] = self.clickednews_encoder[i]([user_id, xx])\n clicked_news_rep = concatenate([Lambda(lambda x: K.expand_dims(x,\n axis=1))(news) for news in clicked_news_vec], axis=1)\n news_temp_dense = self.clickednews_dense_layer(qd)\n attention_news = self.clickednews_2_1_dot_layer([clicked_news_rep,\n news_temp_dense])\n attention_news_weight = self.clickednews_softmax_layer(attention_news)\n user_rep = self.clickednews_1_1_dot_layer([clicked_news_rep,\n attention_news_weight])\n candidate_news_vec = [0] * (1 + npratio)\n for i in range(len(candidate_news)):\n xx = self.candidatenews_input_layer[i](candidate_news[i])\n candidate_news_vec[i] = self.candidatenews_encoder[i]([user_id, xx]\n )\n logits = [self.cp_dot_layer([user_rep, candidate_news], axes=-1) for\n candidate_news in candidate_news_vec]\n logits = self.cp_activation_layer(self.cp_concatenate(logits))\n return logits\n",
"step-2": "<mask token>\n\n\nclass NewsEncoder(tf.keras.Model):\n\n def __init__(self):\n super(NewsEncoder, self).__init__(name='NewsEncoder')\n self.userid_input_layer = Input()\n self.userid_embedding_layer = Embedding()\n self.userid_dense_layer = Dense()\n self.userid_flatten_layer = Flatten()\n self.news_input_layer = Input()\n self.news_embedding_layer = Embedding()\n self.news_conv_layer = Conv1D()\n self.news_dropout_layer_1 = Dropout(0.2)\n self.news_dropout_layer_2 = Dropout(0.2)\n self.pa_dense_layer = Dense()\n self.pa_2_1_dot_layer = Dot()\n self.pa_softmax_layer = Activation('softmax')\n self.pa_1_1_dot_layer = Dot()\n\n def call(self, inputs):\n \"\"\"多输入:输入 user_id、 news_input\"\"\"\n \"\"\"输入单个用户的 user id 和 一篇 news 的信息\"\"\"\n user_id, news_input = inputs[0], inputs[1]\n x1 = self.userid_input_layer(user_id)\n x1 = self.userid_embedding_layer(x1)\n x1 = self.userid_dense_layer(x1)\n qw = self.userid_flatten_layer(x1)\n x2 = self.news_input_layer(news_input)\n x2 = self.news_embedding_layer(x2)\n x2 = self.news_dropout_layer_1(x2)\n x2 = self.news_conv_layer(x2)\n x2 = self.news_dropout_layer_2(x2)\n qw = self.pa_dense_layer(qw)\n attention_a = self.pa_2_1_dot_layer([x2, qw])\n attention_weight = self.pa_softmax_layer(attention_a)\n news_rep = self.pa_1_1_dot_layer([x2, attention_weight])\n return news_rep\n\n\nclass NPA(tf.keras.Model):\n\n def __init__(self):\n super(NPA, self).__init__(name='NPA')\n self.userid_input_layer = Input()\n self.userid_embedding_layer = Embedding()\n self.userid_dense_layer = Dense()\n self.userid_flatten_layer = Flatten()\n self.clickednews_input_layer = [Input((MAX_SENT_LENGTH,), dtype=\n 'int32') for _ in range(MAX_SENTS)]\n self.clickednews_encoder = [NewsEncoder() for _ in range(MAX_SENTS)]\n self.clickednews_dense_layer = Dense()\n self.clickednews_2_1_dot_layer = Dot((2, 1))\n self.clickednews_softmax_layer = Activation('softmax')\n self.clickednews_1_1_dot_layer = Dot((1, 1))\n self.candidatenews_input_layer = [Input((MAX_SENT_LENGTH,), dtype=\n 'int32') for _ in range(1 + npratio)]\n self.candidatenews_encoder = [NewsEncoder() for _ in range(1 + npratio)\n ]\n self.cp_dot_layer = dot()\n self.cp_concatenate = concatenate()\n self.cp_activation_layer = Activation('softmax')\n\n def call(self, inputs):\n user_id, clicked_news, candidate_news = inputs[0], inputs[1], inputs[2]\n x1 = self.userid_input_layer(user_id)\n x1 = self.userid_embedding_layer(x1)\n x1 = self.userid_dense_layer(x1)\n qd = self.userid_flatten_layer(x1)\n clicked_news_vec = [0] * MAX_SENTS\n for i in range(len(clicked_news)):\n xx = self.clickednews_input_layer[i](clicked_news[i])\n clicked_news_vec[i] = self.clickednews_encoder[i]([user_id, xx])\n clicked_news_rep = concatenate([Lambda(lambda x: K.expand_dims(x,\n axis=1))(news) for news in clicked_news_vec], axis=1)\n news_temp_dense = self.clickednews_dense_layer(qd)\n attention_news = self.clickednews_2_1_dot_layer([clicked_news_rep,\n news_temp_dense])\n attention_news_weight = self.clickednews_softmax_layer(attention_news)\n user_rep = self.clickednews_1_1_dot_layer([clicked_news_rep,\n attention_news_weight])\n candidate_news_vec = [0] * (1 + npratio)\n for i in range(len(candidate_news)):\n xx = self.candidatenews_input_layer[i](candidate_news[i])\n candidate_news_vec[i] = self.candidatenews_encoder[i]([user_id, xx]\n )\n logits = [self.cp_dot_layer([user_rep, candidate_news], axes=-1) for\n candidate_news in candidate_news_vec]\n logits = self.cp_activation_layer(self.cp_concatenate(logits))\n return 
logits\n",
"step-3": "<mask token>\nnpratio = 4\nMAX_SENT_LENGTH = 30\nMAX_SENTS = 50\n\n\nclass NewsEncoder(tf.keras.Model):\n\n def __init__(self):\n super(NewsEncoder, self).__init__(name='NewsEncoder')\n self.userid_input_layer = Input()\n self.userid_embedding_layer = Embedding()\n self.userid_dense_layer = Dense()\n self.userid_flatten_layer = Flatten()\n self.news_input_layer = Input()\n self.news_embedding_layer = Embedding()\n self.news_conv_layer = Conv1D()\n self.news_dropout_layer_1 = Dropout(0.2)\n self.news_dropout_layer_2 = Dropout(0.2)\n self.pa_dense_layer = Dense()\n self.pa_2_1_dot_layer = Dot()\n self.pa_softmax_layer = Activation('softmax')\n self.pa_1_1_dot_layer = Dot()\n\n def call(self, inputs):\n \"\"\"多输入:输入 user_id、 news_input\"\"\"\n \"\"\"输入单个用户的 user id 和 一篇 news 的信息\"\"\"\n user_id, news_input = inputs[0], inputs[1]\n x1 = self.userid_input_layer(user_id)\n x1 = self.userid_embedding_layer(x1)\n x1 = self.userid_dense_layer(x1)\n qw = self.userid_flatten_layer(x1)\n x2 = self.news_input_layer(news_input)\n x2 = self.news_embedding_layer(x2)\n x2 = self.news_dropout_layer_1(x2)\n x2 = self.news_conv_layer(x2)\n x2 = self.news_dropout_layer_2(x2)\n qw = self.pa_dense_layer(qw)\n attention_a = self.pa_2_1_dot_layer([x2, qw])\n attention_weight = self.pa_softmax_layer(attention_a)\n news_rep = self.pa_1_1_dot_layer([x2, attention_weight])\n return news_rep\n\n\nclass NPA(tf.keras.Model):\n\n def __init__(self):\n super(NPA, self).__init__(name='NPA')\n self.userid_input_layer = Input()\n self.userid_embedding_layer = Embedding()\n self.userid_dense_layer = Dense()\n self.userid_flatten_layer = Flatten()\n self.clickednews_input_layer = [Input((MAX_SENT_LENGTH,), dtype=\n 'int32') for _ in range(MAX_SENTS)]\n self.clickednews_encoder = [NewsEncoder() for _ in range(MAX_SENTS)]\n self.clickednews_dense_layer = Dense()\n self.clickednews_2_1_dot_layer = Dot((2, 1))\n self.clickednews_softmax_layer = Activation('softmax')\n self.clickednews_1_1_dot_layer = Dot((1, 1))\n self.candidatenews_input_layer = [Input((MAX_SENT_LENGTH,), dtype=\n 'int32') for _ in range(1 + npratio)]\n self.candidatenews_encoder = [NewsEncoder() for _ in range(1 + npratio)\n ]\n self.cp_dot_layer = dot()\n self.cp_concatenate = concatenate()\n self.cp_activation_layer = Activation('softmax')\n\n def call(self, inputs):\n user_id, clicked_news, candidate_news = inputs[0], inputs[1], inputs[2]\n x1 = self.userid_input_layer(user_id)\n x1 = self.userid_embedding_layer(x1)\n x1 = self.userid_dense_layer(x1)\n qd = self.userid_flatten_layer(x1)\n clicked_news_vec = [0] * MAX_SENTS\n for i in range(len(clicked_news)):\n xx = self.clickednews_input_layer[i](clicked_news[i])\n clicked_news_vec[i] = self.clickednews_encoder[i]([user_id, xx])\n clicked_news_rep = concatenate([Lambda(lambda x: K.expand_dims(x,\n axis=1))(news) for news in clicked_news_vec], axis=1)\n news_temp_dense = self.clickednews_dense_layer(qd)\n attention_news = self.clickednews_2_1_dot_layer([clicked_news_rep,\n news_temp_dense])\n attention_news_weight = self.clickednews_softmax_layer(attention_news)\n user_rep = self.clickednews_1_1_dot_layer([clicked_news_rep,\n attention_news_weight])\n candidate_news_vec = [0] * (1 + npratio)\n for i in range(len(candidate_news)):\n xx = self.candidatenews_input_layer[i](candidate_news[i])\n candidate_news_vec[i] = self.candidatenews_encoder[i]([user_id, xx]\n )\n logits = [self.cp_dot_layer([user_rep, candidate_news], axes=-1) for\n candidate_news in candidate_news_vec]\n logits = 
self.cp_activation_layer(self.cp_concatenate(logits))\n return logits\n",
"step-4": "<mask token>\nimport tensorflow as tf\nfrom tensorflow.keras import *\nfrom tensorflow.keras.layers import *\nfrom keras import backend as K\nnpratio = 4\nMAX_SENT_LENGTH = 30\nMAX_SENTS = 50\n\n\nclass NewsEncoder(tf.keras.Model):\n\n def __init__(self):\n super(NewsEncoder, self).__init__(name='NewsEncoder')\n self.userid_input_layer = Input()\n self.userid_embedding_layer = Embedding()\n self.userid_dense_layer = Dense()\n self.userid_flatten_layer = Flatten()\n self.news_input_layer = Input()\n self.news_embedding_layer = Embedding()\n self.news_conv_layer = Conv1D()\n self.news_dropout_layer_1 = Dropout(0.2)\n self.news_dropout_layer_2 = Dropout(0.2)\n self.pa_dense_layer = Dense()\n self.pa_2_1_dot_layer = Dot()\n self.pa_softmax_layer = Activation('softmax')\n self.pa_1_1_dot_layer = Dot()\n\n def call(self, inputs):\n \"\"\"多输入:输入 user_id、 news_input\"\"\"\n \"\"\"输入单个用户的 user id 和 一篇 news 的信息\"\"\"\n user_id, news_input = inputs[0], inputs[1]\n x1 = self.userid_input_layer(user_id)\n x1 = self.userid_embedding_layer(x1)\n x1 = self.userid_dense_layer(x1)\n qw = self.userid_flatten_layer(x1)\n x2 = self.news_input_layer(news_input)\n x2 = self.news_embedding_layer(x2)\n x2 = self.news_dropout_layer_1(x2)\n x2 = self.news_conv_layer(x2)\n x2 = self.news_dropout_layer_2(x2)\n qw = self.pa_dense_layer(qw)\n attention_a = self.pa_2_1_dot_layer([x2, qw])\n attention_weight = self.pa_softmax_layer(attention_a)\n news_rep = self.pa_1_1_dot_layer([x2, attention_weight])\n return news_rep\n\n\nclass NPA(tf.keras.Model):\n\n def __init__(self):\n super(NPA, self).__init__(name='NPA')\n self.userid_input_layer = Input()\n self.userid_embedding_layer = Embedding()\n self.userid_dense_layer = Dense()\n self.userid_flatten_layer = Flatten()\n self.clickednews_input_layer = [Input((MAX_SENT_LENGTH,), dtype=\n 'int32') for _ in range(MAX_SENTS)]\n self.clickednews_encoder = [NewsEncoder() for _ in range(MAX_SENTS)]\n self.clickednews_dense_layer = Dense()\n self.clickednews_2_1_dot_layer = Dot((2, 1))\n self.clickednews_softmax_layer = Activation('softmax')\n self.clickednews_1_1_dot_layer = Dot((1, 1))\n self.candidatenews_input_layer = [Input((MAX_SENT_LENGTH,), dtype=\n 'int32') for _ in range(1 + npratio)]\n self.candidatenews_encoder = [NewsEncoder() for _ in range(1 + npratio)\n ]\n self.cp_dot_layer = dot()\n self.cp_concatenate = concatenate()\n self.cp_activation_layer = Activation('softmax')\n\n def call(self, inputs):\n user_id, clicked_news, candidate_news = inputs[0], inputs[1], inputs[2]\n x1 = self.userid_input_layer(user_id)\n x1 = self.userid_embedding_layer(x1)\n x1 = self.userid_dense_layer(x1)\n qd = self.userid_flatten_layer(x1)\n clicked_news_vec = [0] * MAX_SENTS\n for i in range(len(clicked_news)):\n xx = self.clickednews_input_layer[i](clicked_news[i])\n clicked_news_vec[i] = self.clickednews_encoder[i]([user_id, xx])\n clicked_news_rep = concatenate([Lambda(lambda x: K.expand_dims(x,\n axis=1))(news) for news in clicked_news_vec], axis=1)\n news_temp_dense = self.clickednews_dense_layer(qd)\n attention_news = self.clickednews_2_1_dot_layer([clicked_news_rep,\n news_temp_dense])\n attention_news_weight = self.clickednews_softmax_layer(attention_news)\n user_rep = self.clickednews_1_1_dot_layer([clicked_news_rep,\n attention_news_weight])\n candidate_news_vec = [0] * (1 + npratio)\n for i in range(len(candidate_news)):\n xx = self.candidatenews_input_layer[i](candidate_news[i])\n candidate_news_vec[i] = self.candidatenews_encoder[i]([user_id, xx]\n )\n logits = 
[self.cp_dot_layer([user_rep, candidate_news], axes=-1) for\n candidate_news in candidate_news_vec]\n logits = self.cp_activation_layer(self.cp_concatenate(logits))\n return logits\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\n======================\r\n@author : Zhang Xu\r\n@time : 2021/9/8:16:29\r\n@email : [email protected]\r\n@content : tensorflow subclassing 复现 NPA\r\n======================\r\n\"\"\"\r\nimport tensorflow as tf\r\nfrom tensorflow.keras import *\r\nfrom tensorflow.keras.layers import *\r\nfrom keras import backend as K\r\n\r\nnpratio = 4\r\n\r\nMAX_SENT_LENGTH = 30 # 一篇news的单词数量\r\nMAX_SENTS = 50 # 一个用户的点击的news的数量\r\n\r\n# news encoder\r\n# 输入:user id, 1篇news的信息\r\n# 输出:news representation\r\nclass NewsEncoder(tf.keras.Model):\r\n\r\n def __init__(self):\r\n super(NewsEncoder, self).__init__(name='NewsEncoder')\r\n\r\n # user_id 部分\r\n self.userid_input_layer = Input()\r\n self.userid_embedding_layer = Embedding()\r\n self.userid_dense_layer = Dense()\r\n self.userid_flatten_layer = Flatten()\r\n\r\n # news 部分\r\n self.news_input_layer = Input()\r\n self.news_embedding_layer = Embedding()\r\n self.news_conv_layer = Conv1D()\r\n self.news_dropout_layer_1 = Dropout(0.2)\r\n self.news_dropout_layer_2 = Dropout(0.2)\r\n\r\n # personalized attention 部分\r\n self.pa_dense_layer = Dense()\r\n self.pa_2_1_dot_layer = Dot()\r\n self.pa_softmax_layer = Activation('softmax')\r\n self.pa_1_1_dot_layer = Dot()\r\n\r\n def call(self, inputs):\r\n '''多输入:输入 user_id、 news_input'''\r\n '''输入单个用户的 user id 和 一篇 news 的信息'''\r\n user_id, news_input = inputs[0], inputs[1]\r\n\r\n # qw\r\n x1 = self.userid_input_layer(user_id)\r\n x1 = self.userid_embedding_layer(x1)\r\n x1 = self.userid_dense_layer(x1)\r\n qw = self.userid_flatten_layer(x1)\r\n\r\n # news representation\r\n x2 = self.news_input_layer(news_input)\r\n x2 = self.news_embedding_layer(x2)\r\n x2 = self.news_dropout_layer_1(x2)\r\n x2 = self.news_conv_layer(x2)\r\n x2 = self.news_dropout_layer_2(x2)\r\n\r\n # personalized attention\r\n qw = self.pa_dense_layer(qw)\r\n attention_a = self.pa_2_1_dot_layer([x2, qw])\r\n attention_weight = self.pa_softmax_layer(attention_a)\r\n news_rep = self.pa_1_1_dot_layer([x2, attention_weight])\r\n\r\n return news_rep\r\n\r\n\r\n# NPA\r\n# 输入:user id 和 该用户所有的 clicked news(N篇) 和 candidate news(K篇)\r\n# 输出:对K篇 candidate news 做出预测,分别给出点击的概率\r\nclass NPA(tf.keras.Model):\r\n\r\n def __init__(self):\r\n super(NPA, self).__init__(name='NPA')\r\n\r\n # user id 部分\r\n self.userid_input_layer = Input()\r\n self.userid_embedding_layer = Embedding()\r\n self.userid_dense_layer = Dense()\r\n self.userid_flatten_layer = Flatten()\r\n\r\n # clicked news 部分\r\n self.clickednews_input_layer = [Input((MAX_SENT_LENGTH,), dtype='int32') for _ in range(MAX_SENTS)]\r\n self.clickednews_encoder = [NewsEncoder() for _ in range(MAX_SENTS)]\r\n self.clickednews_dense_layer = Dense()\r\n self.clickednews_2_1_dot_layer = Dot((2, 1))\r\n self.clickednews_softmax_layer = Activation('softmax')\r\n self.clickednews_1_1_dot_layer = Dot((1, 1))\r\n\r\n # candidate news 部分\r\n self.candidatenews_input_layer = [Input((MAX_SENT_LENGTH,), dtype='int32') for _ in range(1 + npratio)]\r\n self.candidatenews_encoder = [NewsEncoder() for _ in range(1 + npratio)]\r\n\r\n # click prediction\r\n self.cp_dot_layer = dot()\r\n self.cp_concatenate = concatenate()\r\n self.cp_activation_layer = Activation('softmax')\r\n\r\n\r\n def call(self, inputs):\r\n user_id, clicked_news, candidate_news = inputs[0], inputs[1], inputs[2]\r\n\r\n # qd\r\n x1 = self.userid_input_layer(user_id)\r\n x1 = self.userid_embedding_layer(x1)\r\n x1 = self.userid_dense_layer(x1)\r\n qd = self.userid_flatten_layer(x1)\r\n\r\n # clicked news\r\n 
clicked_news_vec = [0]*MAX_SENTS\r\n for i in range(len(clicked_news)):\r\n xx = self.clickednews_input_layer[i](clicked_news[i])\r\n clicked_news_vec[i] = self.clickednews_encoder[i]([user_id, xx])\r\n clicked_news_rep = concatenate([Lambda(lambda x: K.expand_dims(x, axis=1))(news) for news in clicked_news_vec], axis=1)\r\n\r\n # qd 与 click_news_rep 进行 personalized attention\r\n news_temp_dense = self.clickednews_dense_layer(qd)\r\n attention_news = self.clickednews_2_1_dot_layer([clicked_news_rep, news_temp_dense])\r\n attention_news_weight = self.clickednews_softmax_layer(attention_news)\r\n user_rep = self.clickednews_1_1_dot_layer([clicked_news_rep, attention_news_weight])\r\n\r\n # candidate news\r\n candidate_news_vec = [0]*(1+npratio)\r\n for i in range(len(candidate_news)):\r\n xx = self.candidatenews_input_layer[i](candidate_news[i])\r\n candidate_news_vec[i] = self.candidatenews_encoder[i]([user_id, xx])\r\n\r\n # click prediction\r\n # candidate news representation 与 user representation 进行 dot 和 softmax\r\n logits = [self.cp_dot_layer([user_rep, candidate_news], axes=-1) for candidate_news in candidate_news_vec]\r\n logits = self.cp_activation_layer(self.cp_concatenate(logits))\r\n\r\n return logits",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
class Order:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def order_num(self):
"""
Return order num of the order.
:return: str
"""
return self._order_number
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def name(self):
"""
Return item name of the order.
:return: str
"""
return self._name
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def holiday(self):
"""
        Return the holiday that the item is for.
:return: str
"""
return self._holiday
<|reserved_special_token_0|>
@property
def is_valid(self):
"""
Return the valid status.
:return: str
"""
return self._is_valid
def is_invalid(self):
"""
Set the status to invalid.
"""
self._is_valid = False
def set_invalid_notes(self, error):
"""
Set the invalid notes.
:param error: str
"""
self._invalid_notes = error
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Order:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def order_num(self):
"""
Return order num of the order.
:return: str
"""
return self._order_number
@property
def product_id(self):
"""
Return product id of the order.
:return: str
"""
return self._product_id
@property
def item_type(self):
"""
Return item type of the order.
:return: str
"""
return self._item_type
@property
def name(self):
"""
Return item name of the order.
:return: str
"""
return self._name
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def holiday(self):
"""
        Return the holiday that the item is for.
:return: str
"""
return self._holiday
<|reserved_special_token_0|>
@property
def is_valid(self):
"""
Return the valid status.
:return: str
"""
return self._is_valid
def is_invalid(self):
"""
Set the status to invalid.
"""
self._is_valid = False
def set_invalid_notes(self, error):
"""
Set the invalid notes.
:param error: str
"""
self._invalid_notes = error
def __str__(self):
"""
String method of the class.
"""
return (
f'Order Number: {self._order_number} Product ID: {self._product_id} Item: {self._item_type} Name: {self._name} Quantity: {self._quantity} Product details: {self._product_details} '
)
<|reserved_special_token_1|>
class Order:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def order_num(self):
"""
Return order num of the order.
:return: str
"""
return self._order_number
@property
def product_id(self):
"""
Return product id of the order.
:return: str
"""
return self._product_id
@property
def item_type(self):
"""
Return item type of the order.
:return: str
"""
return self._item_type
@property
def name(self):
"""
Return item name of the order.
:return: str
"""
return self._name
@property
def product_details(self):
"""
Return other details of the item of the order.
:return: str
"""
return self._product_details
<|reserved_special_token_0|>
@property
def holiday(self):
"""
        Return the holiday that the item is for.
:return: str
"""
return self._holiday
<|reserved_special_token_0|>
@property
def is_valid(self):
"""
Return the valid status.
:return: str
"""
return self._is_valid
def is_invalid(self):
"""
Set the status to invalid.
"""
self._is_valid = False
def set_invalid_notes(self, error):
"""
Set the invalid notes.
:param error: str
"""
self._invalid_notes = error
def __str__(self):
"""
String method of the class.
"""
return (
f'Order Number: {self._order_number} Product ID: {self._product_id} Item: {self._item_type} Name: {self._name} Quantity: {self._quantity} Product details: {self._product_details} '
)
<|reserved_special_token_1|>
class Order:
<|reserved_special_token_0|>
def __init__(self, order_number, product_id, item_type, name,
product_details, factory, quantity, holiday):
"""
Construct a new order
:param order_number: str
:param product_id: str
:param item_type: str
:param name: str
:param product_details: str
:param factory: Factory
:param quantity: int
:param holiday: str
"""
self._order_number = order_number
self._product_id = product_id
self._item_type = item_type
self._name = name
self._product_details = product_details
self._factory = factory
self._quantity = quantity
self._holiday = holiday
self._is_valid = True
self._invalid_notes = ''
@property
def quantity(self):
"""
Return quantity of the order.
:return: int
"""
return self._quantity
@property
def order_num(self):
"""
Return order num of the order.
:return: str
"""
return self._order_number
@property
def product_id(self):
"""
Return product id of the order.
:return: str
"""
return self._product_id
@property
def item_type(self):
"""
Return item type of the order.
:return: str
"""
return self._item_type
@property
def name(self):
"""
Return item name of the order.
:return: str
"""
return self._name
@property
def product_details(self):
"""
Return other details of the item of the order.
:return: str
"""
return self._product_details
@property
def factory(self):
"""
Return the factory that can generate the item.
:return: Factory
"""
return self._factory
@property
def holiday(self):
"""
        Return the holiday that the item is for.
:return: str
"""
return self._holiday
@property
def invalid_notes(self):
"""
Return the invalid notes if the item is invalid.
:return: str
"""
return self._invalid_notes
@property
def is_valid(self):
"""
Return the valid status.
:return: str
"""
return self._is_valid
def is_invalid(self):
"""
Set the status to invalid.
"""
self._is_valid = False
def set_invalid_notes(self, error):
"""
Set the invalid notes.
:param error: str
"""
self._invalid_notes = error
def __str__(self):
"""
String method of the class.
"""
return (
f'Order Number: {self._order_number} Product ID: {self._product_id} Item: {self._item_type} Name: {self._name} Quantity: {self._quantity} Product details: {self._product_details} '
)
<|reserved_special_token_1|>
class Order:
"""
    Initiate a new order for the store
"""
def __init__(self, order_number, product_id, item_type, name, product_details, factory, quantity, holiday):
"""
Construct a new order
:param order_number: str
:param product_id: str
:param item_type: str
:param name: str
:param product_details: str
:param factory: Factory
:param quantity: int
:param holiday: str
"""
self._order_number = order_number
self._product_id = product_id
self._item_type = item_type
self._name = name
self._product_details = product_details
self._factory = factory
self._quantity = quantity
self._holiday = holiday
self._is_valid = True
self._invalid_notes = ""
@property
def quantity(self):
"""
Return quantity of the order.
:return: int
"""
return self._quantity
@property
def order_num(self):
"""
Return order num of the order.
:return: str
"""
return self._order_number
@property
def product_id(self):
"""
Return product id of the order.
:return: str
"""
return self._product_id
@property
def item_type(self):
"""
Return item type of the order.
:return: str
"""
return self._item_type
@property
def name(self):
"""
Return item name of the order.
:return: str
"""
return self._name
@property
def product_details(self):
"""
Return other details of the item of the order.
:return: str
"""
return self._product_details
@property
def factory(self):
"""
Return the factory that can generate the item.
:return: Factory
"""
return self._factory
@property
def holiday(self):
"""
        Return the holiday that the item is for.
:return: str
"""
return self._holiday
@property
def invalid_notes(self):
"""
Return the invalid notes if the item is invalid.
:return: str
"""
return self._invalid_notes
@property
def is_valid(self):
"""
Return the valid status.
:return: str
"""
return self._is_valid
def is_invalid(self):
"""
Set the status to invalid.
"""
self._is_valid = False
def set_invalid_notes(self, error):
"""
Set the invalid notes.
:param error: str
"""
self._invalid_notes = error
def __str__(self):
"""
String method of the class.
"""
return f"Order Number: {self._order_number} " \
f"Product ID: {self._product_id} " \
f"Item: {self._item_type} " \
f"Name: {self._name} " \
f"Quantity: {self._quantity} " \
f"Product details: {self._product_details} "
|
flexible
|
{
"blob_id": "0dce4ea8ef21f2535194330b82ce5706ae694247",
"index": 4676,
"step-1": "class Order:\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def order_num(self):\n \"\"\"\n Return order num of the order.\n :return: str\n \"\"\"\n return self._order_number\n <mask token>\n <mask token>\n\n @property\n def name(self):\n \"\"\"\n Return item name of the order.\n :return: str\n \"\"\"\n return self._name\n <mask token>\n <mask token>\n\n @property\n def holiday(self):\n \"\"\"\n Return the holiday that the item for.\n :return: str\n \"\"\"\n return self._holiday\n <mask token>\n\n @property\n def is_valid(self):\n \"\"\"\n Return the valid status.\n :return: str\n \"\"\"\n return self._is_valid\n\n def is_invalid(self):\n \"\"\"\n Set the status to invalid.\n \"\"\"\n self._is_valid = False\n\n def set_invalid_notes(self, error):\n \"\"\"\n Set the invalid notes.\n :param error: str\n \"\"\"\n self._invalid_notes = error\n <mask token>\n",
"step-2": "class Order:\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def order_num(self):\n \"\"\"\n Return order num of the order.\n :return: str\n \"\"\"\n return self._order_number\n\n @property\n def product_id(self):\n \"\"\"\n Return product id of the order.\n :return: str\n \"\"\"\n return self._product_id\n\n @property\n def item_type(self):\n \"\"\"\n Return item type of the order.\n :return: str\n \"\"\"\n return self._item_type\n\n @property\n def name(self):\n \"\"\"\n Return item name of the order.\n :return: str\n \"\"\"\n return self._name\n <mask token>\n <mask token>\n\n @property\n def holiday(self):\n \"\"\"\n Return the holiday that the item for.\n :return: str\n \"\"\"\n return self._holiday\n <mask token>\n\n @property\n def is_valid(self):\n \"\"\"\n Return the valid status.\n :return: str\n \"\"\"\n return self._is_valid\n\n def is_invalid(self):\n \"\"\"\n Set the status to invalid.\n \"\"\"\n self._is_valid = False\n\n def set_invalid_notes(self, error):\n \"\"\"\n Set the invalid notes.\n :param error: str\n \"\"\"\n self._invalid_notes = error\n\n def __str__(self):\n \"\"\"\n String method of the class.\n \"\"\"\n return (\n f'Order Number: {self._order_number} Product ID: {self._product_id} Item: {self._item_type} Name: {self._name} Quantity: {self._quantity} Product details: {self._product_details} '\n )\n",
"step-3": "class Order:\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def order_num(self):\n \"\"\"\n Return order num of the order.\n :return: str\n \"\"\"\n return self._order_number\n\n @property\n def product_id(self):\n \"\"\"\n Return product id of the order.\n :return: str\n \"\"\"\n return self._product_id\n\n @property\n def item_type(self):\n \"\"\"\n Return item type of the order.\n :return: str\n \"\"\"\n return self._item_type\n\n @property\n def name(self):\n \"\"\"\n Return item name of the order.\n :return: str\n \"\"\"\n return self._name\n\n @property\n def product_details(self):\n \"\"\"\n Return other details of the item of the order.\n :return: str\n \"\"\"\n return self._product_details\n <mask token>\n\n @property\n def holiday(self):\n \"\"\"\n Return the holiday that the item for.\n :return: str\n \"\"\"\n return self._holiday\n <mask token>\n\n @property\n def is_valid(self):\n \"\"\"\n Return the valid status.\n :return: str\n \"\"\"\n return self._is_valid\n\n def is_invalid(self):\n \"\"\"\n Set the status to invalid.\n \"\"\"\n self._is_valid = False\n\n def set_invalid_notes(self, error):\n \"\"\"\n Set the invalid notes.\n :param error: str\n \"\"\"\n self._invalid_notes = error\n\n def __str__(self):\n \"\"\"\n String method of the class.\n \"\"\"\n return (\n f'Order Number: {self._order_number} Product ID: {self._product_id} Item: {self._item_type} Name: {self._name} Quantity: {self._quantity} Product details: {self._product_details} '\n )\n",
"step-4": "class Order:\n <mask token>\n\n def __init__(self, order_number, product_id, item_type, name,\n product_details, factory, quantity, holiday):\n \"\"\"\n Construct a new order\n :param order_number: str\n :param product_id: str\n :param item_type: str\n :param name: str\n :param product_details: str\n :param factory: Factory\n :param quantity: int\n :param holiday: str\n \"\"\"\n self._order_number = order_number\n self._product_id = product_id\n self._item_type = item_type\n self._name = name\n self._product_details = product_details\n self._factory = factory\n self._quantity = quantity\n self._holiday = holiday\n self._is_valid = True\n self._invalid_notes = ''\n\n @property\n def quantity(self):\n \"\"\"\n Return quantity of the order.\n :return: int\n \"\"\"\n return self._quantity\n\n @property\n def order_num(self):\n \"\"\"\n Return order num of the order.\n :return: str\n \"\"\"\n return self._order_number\n\n @property\n def product_id(self):\n \"\"\"\n Return product id of the order.\n :return: str\n \"\"\"\n return self._product_id\n\n @property\n def item_type(self):\n \"\"\"\n Return item type of the order.\n :return: str\n \"\"\"\n return self._item_type\n\n @property\n def name(self):\n \"\"\"\n Return item name of the order.\n :return: str\n \"\"\"\n return self._name\n\n @property\n def product_details(self):\n \"\"\"\n Return other details of the item of the order.\n :return: str\n \"\"\"\n return self._product_details\n\n @property\n def factory(self):\n \"\"\"\n Return the factory that can generate the item.\n :return: Factory\n \"\"\"\n return self._factory\n\n @property\n def holiday(self):\n \"\"\"\n Return the holiday that the item for.\n :return: str\n \"\"\"\n return self._holiday\n\n @property\n def invalid_notes(self):\n \"\"\"\n Return the invalid notes if the item is invalid.\n :return: str\n \"\"\"\n return self._invalid_notes\n\n @property\n def is_valid(self):\n \"\"\"\n Return the valid status.\n :return: str\n \"\"\"\n return self._is_valid\n\n def is_invalid(self):\n \"\"\"\n Set the status to invalid.\n \"\"\"\n self._is_valid = False\n\n def set_invalid_notes(self, error):\n \"\"\"\n Set the invalid notes.\n :param error: str\n \"\"\"\n self._invalid_notes = error\n\n def __str__(self):\n \"\"\"\n String method of the class.\n \"\"\"\n return (\n f'Order Number: {self._order_number} Product ID: {self._product_id} Item: {self._item_type} Name: {self._name} Quantity: {self._quantity} Product details: {self._product_details} '\n )\n",
"step-5": "class Order:\n \"\"\"\n Initiated a new order for the store\n \"\"\"\n\n def __init__(self, order_number, product_id, item_type, name, product_details, factory, quantity, holiday):\n \"\"\"\n Construct a new order\n :param order_number: str\n :param product_id: str\n :param item_type: str\n :param name: str\n :param product_details: str\n :param factory: Factory\n :param quantity: int\n :param holiday: str\n \"\"\"\n self._order_number = order_number\n self._product_id = product_id\n self._item_type = item_type\n self._name = name\n self._product_details = product_details\n self._factory = factory\n self._quantity = quantity\n self._holiday = holiday\n self._is_valid = True\n self._invalid_notes = \"\"\n\n @property\n def quantity(self):\n \"\"\"\n Return quantity of the order.\n :return: int\n \"\"\"\n return self._quantity\n\n @property\n def order_num(self):\n \"\"\"\n Return order num of the order.\n :return: str\n \"\"\"\n return self._order_number\n\n @property\n def product_id(self):\n \"\"\"\n Return product id of the order.\n :return: str\n \"\"\"\n return self._product_id\n\n @property\n def item_type(self):\n \"\"\"\n Return item type of the order.\n :return: str\n \"\"\"\n return self._item_type\n\n @property\n def name(self):\n \"\"\"\n Return item name of the order.\n :return: str\n \"\"\"\n return self._name\n\n @property\n def product_details(self):\n \"\"\"\n Return other details of the item of the order.\n :return: str\n \"\"\"\n return self._product_details\n\n @property\n def factory(self):\n \"\"\"\n Return the factory that can generate the item.\n :return: Factory\n \"\"\"\n return self._factory\n\n @property\n def holiday(self):\n \"\"\"\n Return the holiday that the item for.\n :return: str\n \"\"\"\n return self._holiday\n\n @property\n def invalid_notes(self):\n \"\"\"\n Return the invalid notes if the item is invalid.\n :return: str\n \"\"\"\n return self._invalid_notes\n\n @property\n def is_valid(self):\n \"\"\"\n Return the valid status.\n :return: str\n \"\"\"\n return self._is_valid\n\n def is_invalid(self):\n \"\"\"\n Set the status to invalid.\n \"\"\"\n self._is_valid = False\n\n def set_invalid_notes(self, error):\n \"\"\"\n Set the invalid notes.\n :param error: str\n \"\"\"\n self._invalid_notes = error\n\n def __str__(self):\n \"\"\"\n String method of the class.\n \"\"\"\n return f\"Order Number: {self._order_number} \" \\\n f\"Product ID: {self._product_id} \" \\\n f\"Item: {self._item_type} \" \\\n f\"Name: {self._name} \" \\\n f\"Quantity: {self._quantity} \" \\\n f\"Product details: {self._product_details} \"\n",
"step-ids": [
7,
10,
11,
15,
17
]
}
|
[
7,
10,
11,
15,
17
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def solveMeFirst(a, b):
return a + b
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def solveMeFirst(a, b):
return a + b
print(solveMeFirst(int(input()), int(input())))
<|reserved_special_token_1|>
#!/bin/python3
def solveMeFirst(a,b):
return a + b
print(solveMeFirst(int(input()),int(input())))
|
flexible
|
{
"blob_id": "5d55c586c57de8f287d9f51f0cb1f188c8046c29",
"index": 2977,
"step-1": "<mask token>\n",
"step-2": "def solveMeFirst(a, b):\n return a + b\n\n\n<mask token>\n",
"step-3": "def solveMeFirst(a, b):\n return a + b\n\n\nprint(solveMeFirst(int(input()), int(input())))\n",
"step-4": "#!/bin/python3\n\ndef solveMeFirst(a,b):\n return a + b\n\nprint(solveMeFirst(int(input()),int(input())))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if os.path.isdir(project):
print('ERROR: Project exists')
exit()
os.mkdir(project)
os.chdir(project)
<|reserved_special_token_0|>
os.system(cmd)
<|reserved_special_token_0|>
with open('requirements.txt', 'w+') as ouf:
ouf.write(requirements)
os.system('env/bin/pip install -r requirements.txt')
<|reserved_special_token_0|>
with open('.gitignore', 'w+') as ouf:
ouf.write(gitignore)
os.system("git init && git add .gitignore && git commit -m 'Initial commit.'")
<|reserved_special_token_0|>
os.system(cmd)
<|reserved_special_token_0|>
os.system(cmd)
<|reserved_special_token_0|>
for line in fileinput.FileInput(settings_path, inplace=1):
if " 'django.contrib.staticfiles'," in line:
line = line.replace(line, line + settings_new_lines)
print(line, end='')
os.system("git add . && git commit -m 'Install Django project.'")
<|reserved_special_token_0|>
print(message.format(project))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
project = input('Enter short project name: ')
if os.path.isdir(project):
print('ERROR: Project exists')
exit()
os.mkdir(project)
os.chdir(project)
cmd = 'virtualenv env -p `which python3` --prompt=[django-' + project + ']'
os.system(cmd)
requirements = """django
flake8
autopep8
pytz
django-debug-toolbar
django-autofixture
"""
with open('requirements.txt', 'w+') as ouf:
ouf.write(requirements)
os.system('env/bin/pip install -r requirements.txt')
gitignore = """env
*.sqlite3
*_local*
*.pyc
__pycache__
*.rdb
*.log
log
static
"""
with open('.gitignore', 'w+') as ouf:
ouf.write(gitignore)
os.system("git init && git add .gitignore && git commit -m 'Initial commit.'")
cmd = 'env/bin/django-admin startproject ' + project
os.system(cmd)
cmd = 'mv ' + project + ' tmp && mv tmp/* . && rm -rf tmp'
os.system(cmd)
settings_new_lines = """ 'autofixture',
'debug_toolbar',
"""
settings_path = project + '/settings.py'
for line in fileinput.FileInput(settings_path, inplace=1):
if " 'django.contrib.staticfiles'," in line:
line = line.replace(line, line + settings_new_lines)
print(line, end='')
os.system("git add . && git commit -m 'Install Django project.'")
message = """
You can now type:
cd {0}
activate
"""
print(message.format(project))
<|reserved_special_token_1|>
import os
import fileinput
project = input('Enter short project name: ')
if os.path.isdir(project):
print('ERROR: Project exists')
exit()
os.mkdir(project)
os.chdir(project)
cmd = 'virtualenv env -p `which python3` --prompt=[django-' + project + ']'
os.system(cmd)
requirements = """django
flake8
autopep8
pytz
django-debug-toolbar
django-autofixture
"""
with open('requirements.txt', 'w+') as ouf:
ouf.write(requirements)
os.system('env/bin/pip install -r requirements.txt')
gitignore = """env
*.sqlite3
*_local*
*.pyc
__pycache__
*.rdb
*.log
log
static
"""
with open('.gitignore', 'w+') as ouf:
ouf.write(gitignore)
os.system("git init && git add .gitignore && git commit -m 'Initial commit.'")
cmd = 'env/bin/django-admin startproject ' + project
os.system(cmd)
cmd = 'mv ' + project + ' tmp && mv tmp/* . && rm -rf tmp'
os.system(cmd)
settings_new_lines = """ 'autofixture',
'debug_toolbar',
"""
settings_path = project + '/settings.py'
for line in fileinput.FileInput(settings_path, inplace=1):
if " 'django.contrib.staticfiles'," in line:
line = line.replace(line, line + settings_new_lines)
print(line, end='')
os.system("git add . && git commit -m 'Install Django project.'")
message = """
You can now type:
cd {0}
activate
"""
print(message.format(project))
<|reserved_special_token_1|>
#!/usr/bin/env python3
import os
import fileinput
project = input("Enter short project name: ")
if os.path.isdir(project):
print("ERROR: Project exists")
exit()
os.mkdir(project)
os.chdir(project)
cmd = "virtualenv env -p `which python3` --prompt=[django-" + project + "]"
os.system(cmd)
# Install django with default packages
requirements = """django
flake8
autopep8
pytz
django-debug-toolbar
django-autofixture
"""
with open('requirements.txt', 'w+') as ouf:
ouf.write(requirements)
os.system("env/bin/pip install -r requirements.txt")
# Initiate git repository
gitignore = """env
*.sqlite3
*_local*
*.pyc
__pycache__
*.rdb
*.log
log
static
"""
with open('.gitignore', 'w+') as ouf:
ouf.write(gitignore)
os.system("git init && git add .gitignore && git commit -m 'Initial commit.'")
cmd = "env/bin/django-admin startproject " + project
os.system(cmd)
cmd = "mv " + project + " tmp && mv tmp/* . && rm -rf tmp"
os.system(cmd)
settings_new_lines = """ 'autofixture',
'debug_toolbar',
"""
settings_path = project + '/settings.py'
for line in fileinput.FileInput(settings_path, inplace=1):
if " 'django.contrib.staticfiles'," in line:
line = line.replace(line, line + settings_new_lines)
print(line, end='')
os.system("git add . && git commit -m 'Install Django project.'")
# Output message
message = """
You can now type:
cd {0}
activate
"""
print(message.format(project))
|
flexible
|
{
"blob_id": "c700af6d44cd036212c9e4ae4932bc60630f961e",
"index": 6930,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif os.path.isdir(project):\n print('ERROR: Project exists')\n exit()\nos.mkdir(project)\nos.chdir(project)\n<mask token>\nos.system(cmd)\n<mask token>\nwith open('requirements.txt', 'w+') as ouf:\n ouf.write(requirements)\nos.system('env/bin/pip install -r requirements.txt')\n<mask token>\nwith open('.gitignore', 'w+') as ouf:\n ouf.write(gitignore)\nos.system(\"git init && git add .gitignore && git commit -m 'Initial commit.'\")\n<mask token>\nos.system(cmd)\n<mask token>\nos.system(cmd)\n<mask token>\nfor line in fileinput.FileInput(settings_path, inplace=1):\n if \" 'django.contrib.staticfiles',\" in line:\n line = line.replace(line, line + settings_new_lines)\n print(line, end='')\nos.system(\"git add . && git commit -m 'Install Django project.'\")\n<mask token>\nprint(message.format(project))\n",
"step-3": "<mask token>\nproject = input('Enter short project name: ')\nif os.path.isdir(project):\n print('ERROR: Project exists')\n exit()\nos.mkdir(project)\nos.chdir(project)\ncmd = 'virtualenv env -p `which python3` --prompt=[django-' + project + ']'\nos.system(cmd)\nrequirements = \"\"\"django\nflake8\nautopep8\npytz\ndjango-debug-toolbar\ndjango-autofixture\n\"\"\"\nwith open('requirements.txt', 'w+') as ouf:\n ouf.write(requirements)\nos.system('env/bin/pip install -r requirements.txt')\ngitignore = \"\"\"env\n*.sqlite3\n*_local*\n*.pyc\n__pycache__\n*.rdb\n*.log\nlog\nstatic\n\"\"\"\nwith open('.gitignore', 'w+') as ouf:\n ouf.write(gitignore)\nos.system(\"git init && git add .gitignore && git commit -m 'Initial commit.'\")\ncmd = 'env/bin/django-admin startproject ' + project\nos.system(cmd)\ncmd = 'mv ' + project + ' tmp && mv tmp/* . && rm -rf tmp'\nos.system(cmd)\nsettings_new_lines = \"\"\" 'autofixture',\n 'debug_toolbar',\n\"\"\"\nsettings_path = project + '/settings.py'\nfor line in fileinput.FileInput(settings_path, inplace=1):\n if \" 'django.contrib.staticfiles',\" in line:\n line = line.replace(line, line + settings_new_lines)\n print(line, end='')\nos.system(\"git add . && git commit -m 'Install Django project.'\")\nmessage = \"\"\"\n\nYou can now type:\ncd {0}\nactivate\n\"\"\"\nprint(message.format(project))\n",
"step-4": "import os\nimport fileinput\nproject = input('Enter short project name: ')\nif os.path.isdir(project):\n print('ERROR: Project exists')\n exit()\nos.mkdir(project)\nos.chdir(project)\ncmd = 'virtualenv env -p `which python3` --prompt=[django-' + project + ']'\nos.system(cmd)\nrequirements = \"\"\"django\nflake8\nautopep8\npytz\ndjango-debug-toolbar\ndjango-autofixture\n\"\"\"\nwith open('requirements.txt', 'w+') as ouf:\n ouf.write(requirements)\nos.system('env/bin/pip install -r requirements.txt')\ngitignore = \"\"\"env\n*.sqlite3\n*_local*\n*.pyc\n__pycache__\n*.rdb\n*.log\nlog\nstatic\n\"\"\"\nwith open('.gitignore', 'w+') as ouf:\n ouf.write(gitignore)\nos.system(\"git init && git add .gitignore && git commit -m 'Initial commit.'\")\ncmd = 'env/bin/django-admin startproject ' + project\nos.system(cmd)\ncmd = 'mv ' + project + ' tmp && mv tmp/* . && rm -rf tmp'\nos.system(cmd)\nsettings_new_lines = \"\"\" 'autofixture',\n 'debug_toolbar',\n\"\"\"\nsettings_path = project + '/settings.py'\nfor line in fileinput.FileInput(settings_path, inplace=1):\n if \" 'django.contrib.staticfiles',\" in line:\n line = line.replace(line, line + settings_new_lines)\n print(line, end='')\nos.system(\"git add . && git commit -m 'Install Django project.'\")\nmessage = \"\"\"\n\nYou can now type:\ncd {0}\nactivate\n\"\"\"\nprint(message.format(project))\n",
"step-5": "#!/usr/bin/env python3\n\nimport os\nimport fileinput\n\nproject = input(\"Enter short project name: \")\n\nif os.path.isdir(project):\n print(\"ERROR: Project exists\")\n exit()\n\nos.mkdir(project)\nos.chdir(project)\ncmd = \"virtualenv env -p `which python3` --prompt=[django-\" + project + \"]\"\nos.system(cmd)\n\n# Install django with default packages\nrequirements = \"\"\"django\nflake8\nautopep8\npytz\ndjango-debug-toolbar\ndjango-autofixture\n\"\"\"\nwith open('requirements.txt', 'w+') as ouf:\n ouf.write(requirements)\n\nos.system(\"env/bin/pip install -r requirements.txt\")\n\n# Initiate git repository\ngitignore = \"\"\"env\n*.sqlite3\n*_local*\n*.pyc\n__pycache__\n*.rdb\n*.log\nlog\nstatic\n\"\"\"\nwith open('.gitignore', 'w+') as ouf:\n ouf.write(gitignore)\n\nos.system(\"git init && git add .gitignore && git commit -m 'Initial commit.'\")\n\ncmd = \"env/bin/django-admin startproject \" + project\nos.system(cmd)\n\ncmd = \"mv \" + project + \" tmp && mv tmp/* . && rm -rf tmp\"\nos.system(cmd)\n\nsettings_new_lines = \"\"\" 'autofixture',\n 'debug_toolbar',\n\"\"\"\nsettings_path = project + '/settings.py'\nfor line in fileinput.FileInput(settings_path, inplace=1):\n if \" 'django.contrib.staticfiles',\" in line:\n line = line.replace(line, line + settings_new_lines)\n print(line, end='')\n\nos.system(\"git add . && git commit -m 'Install Django project.'\")\n\n# Output message\nmessage = \"\"\"\n\nYou can now type:\ncd {0}\nactivate\n\"\"\"\nprint(message.format(project))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from distutils.core import setup
setup(name='greeker',
version='0.3.2-git',
description="scrambles nouns in an XML document to produce a specimen for layout testing",
author="Brian Tingle",
author_email="[email protected]",
url="http://tingletech.github.com/greeker.py/",
install_requires=["inflect>=0.2.1", "lxml>=2.3.2", "nltk>=2.0.1rc2-git", "numpy", "argparse"],
py_modules=['greeker'],
scripts=['greeker.py'],
)
|
normal
|
{
"blob_id": "1fda8274024bdf74e7fbd4ac4a27d6cfe6032a13",
"index": 9790,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='greeker', version='0.3.2-git', description=\n 'scrambles nouns in an XML document to produce a specimen for layout testing'\n , author='Brian Tingle', author_email=\n '[email protected]', url=\n 'http://tingletech.github.com/greeker.py/', install_requires=[\n 'inflect>=0.2.1', 'lxml>=2.3.2', 'nltk>=2.0.1rc2-git', 'numpy',\n 'argparse'], py_modules=['greeker'], scripts=['greeker.py'])\n",
"step-3": "from distutils.core import setup\nsetup(name='greeker', version='0.3.2-git', description=\n 'scrambles nouns in an XML document to produce a specimen for layout testing'\n , author='Brian Tingle', author_email=\n '[email protected]', url=\n 'http://tingletech.github.com/greeker.py/', install_requires=[\n 'inflect>=0.2.1', 'lxml>=2.3.2', 'nltk>=2.0.1rc2-git', 'numpy',\n 'argparse'], py_modules=['greeker'], scripts=['greeker.py'])\n",
"step-4": "from distutils.core import setup\nsetup(name='greeker',\n version='0.3.2-git',\n description=\"scrambles nouns in an XML document to produce a specimen for layout testing\",\n author=\"Brian Tingle\",\n author_email=\"[email protected]\",\n url=\"http://tingletech.github.com/greeker.py/\",\n install_requires=[\"inflect>=0.2.1\", \"lxml>=2.3.2\", \"nltk>=2.0.1rc2-git\", \"numpy\", \"argparse\"],\n py_modules=['greeker'],\n scripts=['greeker.py'],\n )\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Copyright (C) 2018-2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from openvino.tools.mo.ops.pack import PackOp
from openvino.tools.mo.front.extractor import FrontExtractorOp
from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs
class StackFrontExtractor(FrontExtractorOp):
op = 'stack'
enabled = True
@classmethod
def extract(cls, node):
attrs = get_mxnet_layer_attrs(node.symbol_dict)
update_attrs = {
'axis': attrs.int('axis', 0)
}
# update the attributes of the node
PackOp.update_node_stat(node, update_attrs)
return cls.enabled
|
normal
|
{
"blob_id": "dd71feda1ed5ff7ef9dee1573ad63939a3e09691",
"index": 7526,
"step-1": "<mask token>\n\n\nclass StackFrontExtractor(FrontExtractorOp):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass StackFrontExtractor(FrontExtractorOp):\n <mask token>\n <mask token>\n\n @classmethod\n def extract(cls, node):\n attrs = get_mxnet_layer_attrs(node.symbol_dict)\n update_attrs = {'axis': attrs.int('axis', 0)}\n PackOp.update_node_stat(node, update_attrs)\n return cls.enabled\n",
"step-3": "<mask token>\n\n\nclass StackFrontExtractor(FrontExtractorOp):\n op = 'stack'\n enabled = True\n\n @classmethod\n def extract(cls, node):\n attrs = get_mxnet_layer_attrs(node.symbol_dict)\n update_attrs = {'axis': attrs.int('axis', 0)}\n PackOp.update_node_stat(node, update_attrs)\n return cls.enabled\n",
"step-4": "from openvino.tools.mo.ops.pack import PackOp\nfrom openvino.tools.mo.front.extractor import FrontExtractorOp\nfrom openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs\n\n\nclass StackFrontExtractor(FrontExtractorOp):\n op = 'stack'\n enabled = True\n\n @classmethod\n def extract(cls, node):\n attrs = get_mxnet_layer_attrs(node.symbol_dict)\n update_attrs = {'axis': attrs.int('axis', 0)}\n PackOp.update_node_stat(node, update_attrs)\n return cls.enabled\n",
"step-5": "# Copyright (C) 2018-2023 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nfrom openvino.tools.mo.ops.pack import PackOp\nfrom openvino.tools.mo.front.extractor import FrontExtractorOp\nfrom openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs\n\n\nclass StackFrontExtractor(FrontExtractorOp):\n op = 'stack'\n enabled = True\n\n @classmethod\n def extract(cls, node):\n attrs = get_mxnet_layer_attrs(node.symbol_dict)\n\n update_attrs = {\n 'axis': attrs.int('axis', 0)\n }\n\n # update the attributes of the node\n PackOp.update_node_stat(node, update_attrs)\n\n return cls.enabled\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from kivy.app import App
from kivy.lang import Builder
from kivy.uix.screenmanager import ScreenManager, Screen
import subprocess
import socket
from kivy.uix.button import Button
from kivy.uix.button import Label
from kivy.uix.boxlayout import BoxLayout
Builder.load_string("""
<MenuScreen>:
BoxLayout:
orientation: "vertical"
<SettingsScreen>:
BoxLayout:
orientation: "vertical"
Button:
text: 'Scan For Networks'
on_release:
root.manager.current = 'networks'
root.scan()
Button:
text: 'Back to menu'
on_release:
root.manager.transition.direction = 'right'
root.manager.current = 'menu'
<NetworksScreen>:
BoxLayout:
orientation: "vertical"
""")
ssids = []
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Declare both screens
class MenuScreen(Screen):
def __init__(self, **kwargs):
super(Screen, self).__init__(**kwargs)
vLayout2 = BoxLayout(orientation='vertical')
self.add_widget(vLayout2)
settings_button = Button(text='Settings')
vLayout2.add_widget(settings_button)
settings_button.bind(on_press=self.forwardFunction)
test_button = Button(text='Test')
vLayout2.add_widget(test_button)
test_button.bind(on_press=self.forwardFunction2)
quit_button = Button(text='Quit')
vLayout2.add_widget(quit_button)
quit_button.bind(on_press=self.closeButton)
def closeButton(self, placeholder):
s.close()
App.get_running_app().stop()
def forwardFunction(self, next_screen):
sm.transition.direction = 'left'
sm.current = 'settings'
def forwardFunction2(self, next_screen):
sm.transition.direction = 'left'
sm.current = 'testing'
class TestScreen(Screen):
def __init__(self, **kwargs):
super(Screen, self).__init__(**kwargs)
vLayout3 = BoxLayout(orientation='vertical')
self.add_widget(vLayout3)
test_button = Button(text='Send Message',pos = (100,25), size=(100, 25), size_hint=(.15, None))
self.add_widget(test_button)
test_button.bind(on_press=self.sendData)
back_button = Button(text='Back to Menu', size=(100, 25), size_hint=(.15, None))
vLayout3.add_widget(back_button)
back_button.bind(on_press=self.backFunction)
def sendData(self, placeholder):
data = 'Test Worked'
try:
s.send(data.encode('utf-8'))
except socket.error:
print("An error has occurred... closing connection to server")
finally:
s.shutdown(socket.SHUT_RDWR)
s.close()
def backFunction(self, next_screen):
sm.transition.direction = 'right'
sm.current = 'menu'
class NetworksScreen(Screen):
#def settings_release(self):
def __init__(self, **kwargs):
super(Screen, self).__init__(**kwargs)
def backFunction(self, next_screen):
sm.transition.direction = 'right'
sm.current = 'settings'
def connectWifi(self, placeholder):
#s = socket.socket() # Create a socket object
host = socket.gethostname() # Get local machine name
port = 12345 # Reserve a port for your service.
try:
s.connect((host, port))
print(s.recv(1024))
except socket.error:
print("An error has occurred... closing connection to server")
        finally:
            #s.shutdown(socket.SHUT_RDWR)
            #s.close()
            pass  # placeholder: a finally block with only comments is a syntax error
def printButtons(self):
y = 0
s2 = self.manager.get_screen('settings')
vLayout = BoxLayout(orientation='vertical')
self.add_widget(vLayout)
while y < len(ssids) - 1:
button = Button(text=ssids[y])
button.bind(on_press=self.connectWifi)
vLayout.add_widget(button)
y += 1
back_button = Button(text='Back to Settings')
vLayout.add_widget(back_button)
back_button.bind(on_press=self.backFunction)
class SettingsScreen(Screen):
def scan(self):
results = subprocess.check_output(["netsh", "wlan", "show", "network"])
results = results.decode("ascii") # needed in python 3
results = results.replace("\r", "")
ls = results.split("\n")
ls = ls[4:]
x = 0
y = 0
while x < len(ls):
if x % 5 == 0:
ssids.append(ls[x])
x += 1
while y < len(ssids)-1:
y += 1
s2 = self.manager.get_screen('networks')
s2.printButtons()
# Create the screen manager
sm = ScreenManager()
sm.add_widget(MenuScreen(name='menu'))
sm.add_widget(SettingsScreen(name='settings'))
sm.add_widget(TestScreen(name='testing'))
sm.add_widget(NetworksScreen(name='networks'))
class TestApp(App):
def build(self):
return sm
if __name__ == '__main__':
TestApp().run()
|
normal
|
{
"blob_id": "237a647e7bf0b1c12abd78b1ef6e293e73232a6c",
"index": 2217,
"step-1": "from kivy.app import App\nfrom kivy.lang import Builder\nfrom kivy.uix.screenmanager import ScreenManager, Screen\nimport subprocess\nimport socket\nfrom kivy.uix.button import Button\nfrom kivy.uix.button import Label\nfrom kivy.uix.boxlayout import BoxLayout\n\nBuilder.load_string(\"\"\"\n<MenuScreen>:\n BoxLayout:\n orientation: \"vertical\"\n\n<SettingsScreen>:\n BoxLayout:\n orientation: \"vertical\"\n Button:\n text: 'Scan For Networks'\n on_release:\n root.manager.current = 'networks'\n root.scan()\n\n\n Button:\n text: 'Back to menu'\n on_release:\n root.manager.transition.direction = 'right'\n root.manager.current = 'menu'\n\n<NetworksScreen>:\n BoxLayout:\n orientation: \"vertical\"\n\"\"\")\n\nssids = []\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\n# Declare both screens\nclass MenuScreen(Screen):\n\n def __init__(self, **kwargs):\n super(Screen, self).__init__(**kwargs)\n vLayout2 = BoxLayout(orientation='vertical')\n self.add_widget(vLayout2)\n\n settings_button = Button(text='Settings')\n vLayout2.add_widget(settings_button)\n settings_button.bind(on_press=self.forwardFunction)\n\n test_button = Button(text='Test')\n vLayout2.add_widget(test_button)\n test_button.bind(on_press=self.forwardFunction2)\n\n quit_button = Button(text='Quit')\n vLayout2.add_widget(quit_button)\n quit_button.bind(on_press=self.closeButton)\n\n def closeButton(self, placeholder):\n s.close()\n App.get_running_app().stop()\n\n def forwardFunction(self, next_screen):\n sm.transition.direction = 'left'\n sm.current = 'settings'\n\n def forwardFunction2(self, next_screen):\n sm.transition.direction = 'left'\n sm.current = 'testing'\n\n\n\nclass TestScreen(Screen):\n\n def __init__(self, **kwargs):\n super(Screen, self).__init__(**kwargs)\n\n vLayout3 = BoxLayout(orientation='vertical')\n self.add_widget(vLayout3)\n test_button = Button(text='Send Message',pos = (100,25), size=(100, 25), size_hint=(.15, None))\n self.add_widget(test_button)\n test_button.bind(on_press=self.sendData)\n back_button = Button(text='Back to Menu', size=(100, 25), size_hint=(.15, None))\n vLayout3.add_widget(back_button)\n back_button.bind(on_press=self.backFunction)\n\n\n def sendData(self, placeholder):\n data = 'Test Worked'\n try:\n s.send(data.encode('utf-8'))\n except socket.error:\n print(\"An error has occurred... closing connection to server\")\n finally:\n s.shutdown(socket.SHUT_RDWR)\n s.close()\n\n\n def backFunction(self, next_screen):\n sm.transition.direction = 'right'\n sm.current = 'menu'\n\nclass NetworksScreen(Screen):\n #def settings_release(self):\n def __init__(self, **kwargs):\n super(Screen, self).__init__(**kwargs)\n\n def backFunction(self, next_screen):\n sm.transition.direction = 'right'\n sm.current = 'settings'\n\n def connectWifi(self, placeholder):\n #s = socket.socket() # Create a socket object\n host = socket.gethostname() # Get local machine name\n port = 12345 # Reserve a port for your service.\n\n try:\n s.connect((host, port))\n print(s.recv(1024))\n except socket.error:\n print(\"An error has occurred... 
closing connection to server\")\n finally:\n #s.shutdown(socket.SHUT_RDWR)\n #s.close()\n\n\n def printButtons(self):\n y = 0\n s2 = self.manager.get_screen('settings')\n vLayout = BoxLayout(orientation='vertical')\n self.add_widget(vLayout)\n while y < len(ssids) - 1:\n button = Button(text=ssids[y])\n button.bind(on_press=self.connectWifi)\n vLayout.add_widget(button)\n y += 1\n\n back_button = Button(text='Back to Settings')\n vLayout.add_widget(back_button)\n back_button.bind(on_press=self.backFunction)\n\nclass SettingsScreen(Screen):\n\n def scan(self):\n\n results = subprocess.check_output([\"netsh\", \"wlan\", \"show\", \"network\"])\n results = results.decode(\"ascii\") # needed in python 3\n results = results.replace(\"\\r\", \"\")\n ls = results.split(\"\\n\")\n ls = ls[4:]\n x = 0\n y = 0\n\n while x < len(ls):\n if x % 5 == 0:\n ssids.append(ls[x])\n x += 1\n\n while y < len(ssids)-1:\n y += 1\n\n s2 = self.manager.get_screen('networks')\n s2.printButtons()\n\n\n# Create the screen manager\nsm = ScreenManager()\nsm.add_widget(MenuScreen(name='menu'))\nsm.add_widget(SettingsScreen(name='settings'))\nsm.add_widget(TestScreen(name='testing'))\nsm.add_widget(NetworksScreen(name='networks'))\n\nclass TestApp(App):\n\n def build(self):\n return sm\n\nif __name__ == '__main__':\n TestApp().run()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def molecule_to_list(molecule: str) ->list:
"""Splits up a molucule into elements and amount in order of appearance
Args:
molecule (str): The molecule to split up
Raises:
ValueError: If molecule starts with a lower case letter
ValueError: If molecule contains a non-alphanumeric character
ValueError: If an element starts with a lower case letter
Returns:
list: A list of tuples containing the element symbol and the number of
its appearances at that position
"""
if molecule[0].islower():
raise ValueError
if re.match('^[\\w]+$', molecule) is None:
raise ValueError
result = []
elements = re.findall('([A-Z][a-z]?|[a-z]{1,2})(\\d{1,2})?', molecule)
for element in elements:
if element[0].islower():
raise ValueError
if element[1] == '':
result.append((element[0], 1))
else:
result.append((element[0], int(element[1])))
return result
<|reserved_special_token_1|>
import re
def molecule_to_list(molecule: str) ->list:
"""Splits up a molucule into elements and amount in order of appearance
Args:
molecule (str): The molecule to split up
Raises:
ValueError: If molecule starts with a lower case letter
ValueError: If molecule contains a non-alphanumeric character
ValueError: If an element starts with a lower case letter
Returns:
list: A list of tuples containing the element symbol and the number of
its appearances at that position
"""
if molecule[0].islower():
raise ValueError
if re.match('^[\\w]+$', molecule) is None:
raise ValueError
result = []
elements = re.findall('([A-Z][a-z]?|[a-z]{1,2})(\\d{1,2})?', molecule)
for element in elements:
if element[0].islower():
raise ValueError
if element[1] == '':
result.append((element[0], 1))
else:
result.append((element[0], int(element[1])))
return result
<|reserved_special_token_1|>
import re
def molecule_to_list(molecule: str) -> list:
"""Splits up a molucule into elements and amount in order of appearance
Args:
molecule (str): The molecule to split up
Raises:
ValueError: If molecule starts with a lower case letter
ValueError: If molecule contains a non-alphanumeric character
ValueError: If an element starts with a lower case letter
Returns:
list: A list of tuples containing the element symbol and the number of
its appearances at that position
"""
if molecule[0].islower():
raise ValueError
# Test if molecule contains non-alphanumeric characters
if re.match(r"^[\w]+$", molecule) is None:
raise ValueError
result = []
# Split molecule into elements and amounts
elements = re.findall(r"([A-Z][a-z]?|[a-z]{1,2})(\d{1,2})?", molecule)
for element in elements:
if element[0].islower():
raise ValueError
# Ensure the result has a numerical value
if element[1] == '':
result.append((element[0], 1))
else:
result.append((element[0], int(element[1])))
return result
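# --- Illustrative usage (added sketch; not part of the original record). The
# formulas below are arbitrary examples chosen to show the output shape. ---
if __name__ == "__main__":
    print(molecule_to_list("H2O"))      # [('H', 2), ('O', 1)]
    print(molecule_to_list("C6H12O6"))  # [('C', 6), ('H', 12), ('O', 6)]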
|
flexible
|
{
"blob_id": "a14a1803a0bae755803c471b12035398de262dbc",
"index": 9138,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef molecule_to_list(molecule: str) ->list:\n \"\"\"Splits up a molucule into elements and amount in order of appearance\n\n Args:\n molecule (str): The molecule to split up\n\n Raises:\n ValueError: If molecule starts with a lower case letter\n ValueError: If molecule contains a non-alphanumeric character\n ValueError: If an element starts with a lower case letter\n\n Returns:\n list: A list of tuples containing the element symbol and the number of\n its appearances at that position\n \"\"\"\n if molecule[0].islower():\n raise ValueError\n if re.match('^[\\\\w]+$', molecule) is None:\n raise ValueError\n result = []\n elements = re.findall('([A-Z][a-z]?|[a-z]{1,2})(\\\\d{1,2})?', molecule)\n for element in elements:\n if element[0].islower():\n raise ValueError\n if element[1] == '':\n result.append((element[0], 1))\n else:\n result.append((element[0], int(element[1])))\n return result\n",
"step-3": "import re\n\n\ndef molecule_to_list(molecule: str) ->list:\n \"\"\"Splits up a molucule into elements and amount in order of appearance\n\n Args:\n molecule (str): The molecule to split up\n\n Raises:\n ValueError: If molecule starts with a lower case letter\n ValueError: If molecule contains a non-alphanumeric character\n ValueError: If an element starts with a lower case letter\n\n Returns:\n list: A list of tuples containing the element symbol and the number of\n its appearances at that position\n \"\"\"\n if molecule[0].islower():\n raise ValueError\n if re.match('^[\\\\w]+$', molecule) is None:\n raise ValueError\n result = []\n elements = re.findall('([A-Z][a-z]?|[a-z]{1,2})(\\\\d{1,2})?', molecule)\n for element in elements:\n if element[0].islower():\n raise ValueError\n if element[1] == '':\n result.append((element[0], 1))\n else:\n result.append((element[0], int(element[1])))\n return result\n",
"step-4": "import re\n\n\ndef molecule_to_list(molecule: str) -> list:\n \"\"\"Splits up a molucule into elements and amount in order of appearance\n\n Args:\n molecule (str): The molecule to split up\n\n Raises:\n ValueError: If molecule starts with a lower case letter\n ValueError: If molecule contains a non-alphanumeric character\n ValueError: If an element starts with a lower case letter\n\n Returns:\n list: A list of tuples containing the element symbol and the number of\n its appearances at that position\n \"\"\"\n if molecule[0].islower():\n raise ValueError\n # Test if molecule contains non-alphanumeric characters\n if re.match(r\"^[\\w]+$\", molecule) is None:\n raise ValueError\n result = []\n # Split molecule into elements and amounts\n elements = re.findall(r\"([A-Z][a-z]?|[a-z]{1,2})(\\d{1,2})?\", molecule)\n for element in elements:\n if element[0].islower():\n raise ValueError\n # Ensure the result has a numerical value\n if element[1] == '':\n result.append((element[0], 1))\n else:\n result.append((element[0], int(element[1])))\n return result\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from flask_restful import Resource, reqparse
from db import query
import pymysql
from flask_jwt_extended import jwt_required
"""
This module is used to retrieve the data
for all the request_no's which have a false or a 0 select_status.
This is done by selecting distinct request_no's from requests table
for those rows where select_status = 0
"""
# This resource is for the admin to obtain all the rows in the requests table
# with a particular request_no
class AdminReqNoDetails(Resource):
@jwt_required
def get(self):
parser = reqparse.RequestParser()
parser.add_argument('request_no', type=int, required=True, help="request_no cannot be left blank!")
data = parser.parse_args()
#create query string
qstr = f""" SELECT r_id,request_no,image FROM requests WHERE request_no = {data['request_no']}; """
try:
return query(qstr)
except:
return {
"message" : "There was an error connecting to the requests table while retrieving."
}, 500
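# --- Illustrative wiring (added sketch; not part of the original record). The
# Api instance and URL path below are assumptions: this module only defines the
# resource and never registers it.
#
# from flask import Flask
# from flask_restful import Api
#
# app = Flask(__name__)
# api = Api(app)
# api.add_resource(AdminReqNoDetails, "/admin/request-details")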
|
normal
|
{
"blob_id": "d436362468b847e427bc14ca221cf0fe4b2623e3",
"index": 4408,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass AdminReqNoDetails(Resource):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass AdminReqNoDetails(Resource):\n\n @jwt_required\n def get(self):\n parser = reqparse.RequestParser()\n parser.add_argument('request_no', type=int, required=True, help=\n 'request_no cannot be left blank!')\n data = parser.parse_args()\n qstr = (\n f\" SELECT r_id,request_no,image FROM requests WHERE request_no = {data['request_no']}; \"\n )\n try:\n return query(qstr)\n except:\n return {'message':\n 'There was an error connecting to the requests table while retrieving.'\n }, 500\n",
"step-4": "from flask_restful import Resource, reqparse\nfrom db import query\nimport pymysql\nfrom flask_jwt_extended import jwt_required\n<mask token>\n\n\nclass AdminReqNoDetails(Resource):\n\n @jwt_required\n def get(self):\n parser = reqparse.RequestParser()\n parser.add_argument('request_no', type=int, required=True, help=\n 'request_no cannot be left blank!')\n data = parser.parse_args()\n qstr = (\n f\" SELECT r_id,request_no,image FROM requests WHERE request_no = {data['request_no']}; \"\n )\n try:\n return query(qstr)\n except:\n return {'message':\n 'There was an error connecting to the requests table while retrieving.'\n }, 500\n",
"step-5": "from flask_restful import Resource, reqparse\r\nfrom db import query\r\nimport pymysql\r\nfrom flask_jwt_extended import jwt_required\r\n\r\n\"\"\"\r\nThis module is used to retrieve the data \r\nfor all the request_no's which have a false or a 0 select_status.\r\nThis is done by selecting distinct request_no's from requests table \r\nfor those rows where select_status = 0\r\n\"\"\"\r\n\r\n# This resource is for the admin to obtain all the rows in the requests table \r\n# with a particular request_no\r\nclass AdminReqNoDetails(Resource):\r\n \r\n @jwt_required\r\n def get(self):\r\n parser = reqparse.RequestParser()\r\n parser.add_argument('request_no', type=int, required=True, help=\"request_no cannot be left blank!\")\r\n data = parser.parse_args()\r\n #create query string\r\n qstr = f\"\"\" SELECT r_id,request_no,image FROM requests WHERE request_no = {data['request_no']}; \"\"\"\r\n try:\r\n return query(qstr)\r\n except:\r\n return {\r\n \"message\" : \"There was an error connecting to the requests table while retrieving.\"\r\n }, 500\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class AddReportForm(ReportBaseForm):
"""Form for adding new report."""
pass
class EditReportForm(ReportBaseForm):
"""Form for editing a report."""
cleared_file = HiddenField('cleared_file')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class EditUserForm(UserBaseForm):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def validate_email(form, field):
"""Make sure email is not in use
unless it's the current user's email."""
user = User.query.filter_by(email=form.email.data).first()
if user and not user == g.user:
form.email.errors = ['Email already associated with account!',
*form.email.errors]
raise ValidationError
def validate_username(form, field):
"""Make sure username is not in use
unless it's the current user's username."""
user = User.query.filter_by(username=form.username.data).first()
if user and not user == g.user:
form.username.errors = ['Username already taken!', *form.
username.errors]
raise ValidationError
class LoginForm(FlaskForm):
email = EmailField('Email', validators=[InputRequired(message=
'Email cannot be blank.'), Length(min=5, max=320), Email(
check_deliverability=True, message='Invalid Email address')])
password = PasswordField('Password', validators=[InputRequired(message=
'Password cannot be blank.'), Length(min=8, max=60)])
class ReportBaseForm(FlaskForm):
"""Form for adding new report."""
text = TextAreaField('Report', validators=[InputRequired(message=
'Report cannot be blank.'), Length(min=2)])
photo_url = URLField('Photo URL', validators=[URL(), Optional()],
description=
"""
Either enter a photo URL or
choose an image file to include an image."""
)
photo_file = FileField('Upload Photo', validators=[Optional()],
description=
"""
Either enter a photo URL or
choose an image file to include an image. 4MB max."""
)
def validate(self):
if not super().validate():
return False
if self.photo_url.data and self.photo_file.data:
msg = 'Please specify Photo URL or upload a photo, not both'
self.photo_url.errors.append(msg)
self.photo_file.errors.append(msg)
return False
return True
class AddReportForm(ReportBaseForm):
"""Form for adding new report."""
pass
class EditReportForm(ReportBaseForm):
"""Form for editing a report."""
cleared_file = HiddenField('cleared_file')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AddUserForm(UserBaseForm):
password = PasswordField('Password', validators=[InputRequired(message=
'Password cannot be blank.'), Length(min=8, max=60), Regexp(
'^(?=.*[A-Za-z])(?=.*\\d)(?=.*[$@$!%*#?&])[A-Za-z\\d$@$!%*#?&]{8,}$',
message='Please match the given requirements for password.')],
description=
'Minimum one each - uppercase letter, lowercase letter, number, special character.'
)
def validate_email(form, field):
"""Make sure email not in use."""
if User.query.filter_by(email=form.email.data).first():
form.email.errors.append('Email already associated with account!')
raise ValidationError
def validate_username(form, field):
"""Make sure username not in use."""
if User.query.filter_by(username=form.username.data).first():
form.username.errors.append('Username already taken!')
raise ValidationError
class EditUserForm(UserBaseForm):
"""Edit User Form."""
avatar_url = URLField('Avatar Image URL', validators=[Length(min=6, max
=255), Optional()], description='Online image address')
banner_url = URLField('Banner Image URL', validators=[Length(min=6, max
=255), Optional()], description='Online image address')
byline = StringField('User Byline', validators=[Length(min=2, max=200),
Optional()], description='A short snippet shown under your username')
bio = TextAreaField('User Bio', validators=[Length(min=2, max=500),
Optional()], description='500 character max')
city = StringField('City', validators=[Length(min=2, max=50), Optional()])
state = StringField('State', validators=[Length(min=2, max=50), Optional()]
)
country = StringField('Country', validators=[Length(min=2, max=50),
Optional()])
def validate_email(form, field):
"""Make sure email is not in use
unless it's the current user's email."""
user = User.query.filter_by(email=form.email.data).first()
if user and not user == g.user:
form.email.errors = ['Email already associated with account!',
*form.email.errors]
raise ValidationError
def validate_username(form, field):
"""Make sure username is not in use
unless it's the current user's username."""
user = User.query.filter_by(username=form.username.data).first()
if user and not user == g.user:
form.username.errors = ['Username already taken!', *form.
username.errors]
raise ValidationError
class LoginForm(FlaskForm):
email = EmailField('Email', validators=[InputRequired(message=
'Email cannot be blank.'), Length(min=5, max=320), Email(
check_deliverability=True, message='Invalid Email address')])
password = PasswordField('Password', validators=[InputRequired(message=
'Password cannot be blank.'), Length(min=8, max=60)])
class ReportBaseForm(FlaskForm):
"""Form for adding new report."""
text = TextAreaField('Report', validators=[InputRequired(message=
'Report cannot be blank.'), Length(min=2)])
photo_url = URLField('Photo URL', validators=[URL(), Optional()],
description=
"""
Either enter a photo URL or
choose an image file to include an image."""
)
photo_file = FileField('Upload Photo', validators=[Optional()],
description=
"""
Either enter a photo URL or
choose an image file to include an image. 4MB max."""
)
def validate(self):
if not super().validate():
return False
if self.photo_url.data and self.photo_file.data:
msg = 'Please specify Photo URL or upload a photo, not both'
self.photo_url.errors.append(msg)
self.photo_file.errors.append(msg)
return False
return True
class AddReportForm(ReportBaseForm):
"""Form for adding new report."""
pass
class EditReportForm(ReportBaseForm):
"""Form for editing a report."""
cleared_file = HiddenField('cleared_file')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserBaseForm(FlaskForm):
email = EmailField('Email', validators=[InputRequired(message=
'Email cannot be blank.'), Length(min=5, max=320), Email(
check_deliverability=True, message='Invalid Email address')])
username = StringField('Username', validators=[InputRequired(message=
'Username cannot be blank.'), Length(min=2, max=30)])
class AddUserForm(UserBaseForm):
password = PasswordField('Password', validators=[InputRequired(message=
'Password cannot be blank.'), Length(min=8, max=60), Regexp(
'^(?=.*[A-Za-z])(?=.*\\d)(?=.*[$@$!%*#?&])[A-Za-z\\d$@$!%*#?&]{8,}$',
message='Please match the given requirements for password.')],
description=
'Minimum one each - uppercase letter, lowercase letter, number, special character.'
)
def validate_email(form, field):
"""Make sure email not in use."""
if User.query.filter_by(email=form.email.data).first():
form.email.errors.append('Email already associated with account!')
raise ValidationError
def validate_username(form, field):
"""Make sure username not in use."""
if User.query.filter_by(username=form.username.data).first():
form.username.errors.append('Username already taken!')
raise ValidationError
class EditUserForm(UserBaseForm):
"""Edit User Form."""
avatar_url = URLField('Avatar Image URL', validators=[Length(min=6, max
=255), Optional()], description='Online image address')
banner_url = URLField('Banner Image URL', validators=[Length(min=6, max
=255), Optional()], description='Online image address')
byline = StringField('User Byline', validators=[Length(min=2, max=200),
Optional()], description='A short snippet shown under your username')
bio = TextAreaField('User Bio', validators=[Length(min=2, max=500),
Optional()], description='500 character max')
city = StringField('City', validators=[Length(min=2, max=50), Optional()])
state = StringField('State', validators=[Length(min=2, max=50), Optional()]
)
country = StringField('Country', validators=[Length(min=2, max=50),
Optional()])
def validate_email(form, field):
"""Make sure email is not in use
unless it's the current user's email."""
user = User.query.filter_by(email=form.email.data).first()
if user and not user == g.user:
form.email.errors = ['Email already associated with account!',
*form.email.errors]
raise ValidationError
def validate_username(form, field):
"""Make sure username is not in use
unless it's the current user's username."""
user = User.query.filter_by(username=form.username.data).first()
if user and not user == g.user:
form.username.errors = ['Username already taken!', *form.
username.errors]
raise ValidationError
class LoginForm(FlaskForm):
email = EmailField('Email', validators=[InputRequired(message=
'Email cannot be blank.'), Length(min=5, max=320), Email(
check_deliverability=True, message='Invalid Email address')])
password = PasswordField('Password', validators=[InputRequired(message=
'Password cannot be blank.'), Length(min=8, max=60)])
class ReportBaseForm(FlaskForm):
"""Form for adding new report."""
text = TextAreaField('Report', validators=[InputRequired(message=
'Report cannot be blank.'), Length(min=2)])
photo_url = URLField('Photo URL', validators=[URL(), Optional()],
description=
"""
Either enter a photo URL or
choose an image file to include an image."""
)
photo_file = FileField('Upload Photo', validators=[Optional()],
description=
"""
Either enter a photo URL or
choose an image file to include an image. 4MB max."""
)
def validate(self):
if not super().validate():
return False
if self.photo_url.data and self.photo_file.data:
msg = 'Please specify Photo URL or upload a photo, not both'
self.photo_url.errors.append(msg)
self.photo_file.errors.append(msg)
return False
return True
class AddReportForm(ReportBaseForm):
"""Form for adding new report."""
pass
class EditReportForm(ReportBaseForm):
"""Form for editing a report."""
cleared_file = HiddenField('cleared_file')
<|reserved_special_token_1|>
from flask_wtf import FlaskForm
from wtforms import (
StringField, TextAreaField, PasswordField, HiddenField)
from wtforms.fields.html5 import URLField, EmailField
from flask_wtf.file import FileField
from wtforms.validators import (
InputRequired, Length, Email,
Optional, URL, ValidationError, Regexp)
from models import User
from flask import g
class UserBaseForm(FlaskForm):
email = EmailField("Email", validators=[
InputRequired(message="Email cannot be blank."),
Length(min=5, max=320),
Email(check_deliverability=True,
message="Invalid Email address")])
username = StringField("Username", validators=[
InputRequired(message="Username cannot be blank."),
Length(min=2, max=30)])
class AddUserForm(UserBaseForm):
password = PasswordField("Password", validators=[
InputRequired(message="Password cannot be blank."),
Length(min=8, max=60),
Regexp("^(?=.*[A-Za-z])(?=.*\d)(?=.*[$@$!%*#?&])[A-Za-z\d$@$!%*#?&]{8,}$", message='Please match the given requirements for password.')], # noqa e501
description="Minimum one each - uppercase letter, lowercase letter, number, special character.") # noqa e501
def validate_email(form, field):
"""Make sure email not in use."""
if User.query.filter_by(email=form.email.data).first():
form.email.errors.append(
"Email already associated with account!")
raise ValidationError
def validate_username(form, field):
"""Make sure username not in use."""
if User.query.filter_by(username=form.username.data).first():
form.username.errors.append("Username already taken!")
raise ValidationError
class EditUserForm(UserBaseForm):
"""Edit User Form."""
avatar_url = URLField("Avatar Image URL", validators=[
Length(min=6, max=255), Optional()],
description="Online image address")
banner_url = URLField("Banner Image URL", validators=[
Length(min=6, max=255), Optional()],
description="Online image address")
byline = StringField("User Byline", validators=[
Length(min=2, max=200), Optional()],
description="A short snippet shown under your username")
bio = TextAreaField("User Bio", validators=[
Length(min=2, max=500), Optional()],
description="500 character max")
city = StringField("City", validators=[Length(min=2, max=50), Optional()])
state = StringField("State", validators=[
Length(min=2, max=50), Optional()])
country = StringField("Country", validators=[
Length(min=2, max=50), Optional()])
def validate_email(form, field):
"""Make sure email is not in use
unless it's the current user's email."""
user = User.query.filter_by(email=form.email.data).first()
if user and not user == g.user:
form.email.errors = [
"Email already associated with account!",
*form.email.errors
]
raise ValidationError
def validate_username(form, field):
"""Make sure username is not in use
unless it's the current user's username."""
user = User.query.filter_by(username=form.username.data).first()
if user and not user == g.user:
form.username.errors = [
"Username already taken!",
*form.username.errors
]
raise ValidationError
class LoginForm(FlaskForm):
email = EmailField("Email", validators=[
InputRequired(message="Email cannot be blank."),
Length(min=5, max=320),
Email(check_deliverability=True,
message="Invalid Email address")])
password = PasswordField("Password", validators=[
InputRequired(
message="Password cannot be blank."),
Length(min=8, max=60)])
class ReportBaseForm(FlaskForm):
"""Form for adding new report."""
text = TextAreaField("Report", validators=[
InputRequired(message="Report cannot be blank."),
Length(min=2)])
photo_url = URLField(
"Photo URL", validators=[URL(), Optional()],
description="""
Either enter a photo URL or
choose an image file to include an image.""")
photo_file = FileField(
"Upload Photo", validators=[Optional()],
description="""
Either enter a photo URL or
choose an image file to include an image. 4MB max.""")
def validate(self):
if not super().validate():
return False
if self.photo_url.data and self.photo_file.data:
msg = 'Please specify Photo URL or upload a photo, not both'
self.photo_url.errors.append(msg)
self.photo_file.errors.append(msg)
return False
return True
class AddReportForm(ReportBaseForm):
"""Form for adding new report."""
pass
class EditReportForm(ReportBaseForm):
"""Form for editing a report."""
cleared_file = HiddenField('cleared_file')
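# --- Illustrative usage (added sketch; not part of the original module). A
# typical Flask view consuming AddReportForm; the app object, route path and
# template name are assumptions, not defined here. validate_on_submit() runs
# the field validators plus the custom validate() override above.
#
# @app.route("/reports/new", methods=["GET", "POST"])
# def add_report():
#     form = AddReportForm()
#     if form.validate_on_submit():
#         ...  # persist form.text.data and the optional photo URL/file
#         return redirect("/")
#     return render_template("report_form.html", form=form)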
|
flexible
|
{
"blob_id": "47b2857ac20e46897cc1f64371868ce5174799d6",
"index": 4790,
"step-1": "<mask token>\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n",
"step-2": "<mask token>\n\n\nclass EditUserForm(UserBaseForm):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n user = User.query.filter_by(email=form.email.data).first()\n if user and not user == g.user:\n form.email.errors = ['Email already associated with account!',\n *form.email.errors]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n user = User.query.filter_by(username=form.username.data).first()\n if user and not user == g.user:\n form.username.errors = ['Username already taken!', *form.\n username.errors]\n raise ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n text = TextAreaField('Report', validators=[InputRequired(message=\n 'Report cannot be blank.'), Length(min=2)])\n photo_url = URLField('Photo URL', validators=[URL(), Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\"\n )\n photo_file = FileField('Upload Photo', validators=[Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 4MB max.\"\"\"\n )\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n",
"step-3": "<mask token>\n\n\nclass AddUserForm(UserBaseForm):\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60), Regexp(\n '^(?=.*[A-Za-z])(?=.*\\\\d)(?=.*[$@$!%*#?&])[A-Za-z\\\\d$@$!%*#?&]{8,}$',\n message='Please match the given requirements for password.')],\n description=\n 'Minimum one each - uppercase letter, lowercase letter, number, special character.'\n )\n\n def validate_email(form, field):\n \"\"\"Make sure email not in use.\"\"\"\n if User.query.filter_by(email=form.email.data).first():\n form.email.errors.append('Email already associated with account!')\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username not in use.\"\"\"\n if User.query.filter_by(username=form.username.data).first():\n form.username.errors.append('Username already taken!')\n raise ValidationError\n\n\nclass EditUserForm(UserBaseForm):\n \"\"\"Edit User Form.\"\"\"\n avatar_url = URLField('Avatar Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n banner_url = URLField('Banner Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n byline = StringField('User Byline', validators=[Length(min=2, max=200),\n Optional()], description='A short snippet shown under your username')\n bio = TextAreaField('User Bio', validators=[Length(min=2, max=500),\n Optional()], description='500 character max')\n city = StringField('City', validators=[Length(min=2, max=50), Optional()])\n state = StringField('State', validators=[Length(min=2, max=50), Optional()]\n )\n country = StringField('Country', validators=[Length(min=2, max=50),\n Optional()])\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n user = User.query.filter_by(email=form.email.data).first()\n if user and not user == g.user:\n form.email.errors = ['Email already associated with account!',\n *form.email.errors]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n user = User.query.filter_by(username=form.username.data).first()\n if user and not user == g.user:\n form.username.errors = ['Username already taken!', *form.\n username.errors]\n raise ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n text = TextAreaField('Report', validators=[InputRequired(message=\n 'Report cannot be blank.'), Length(min=2)])\n photo_url = URLField('Photo URL', validators=[URL(), Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\"\n )\n photo_file = FileField('Upload Photo', validators=[Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 
4MB max.\"\"\"\n )\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n",
"step-4": "<mask token>\n\n\nclass UserBaseForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n username = StringField('Username', validators=[InputRequired(message=\n 'Username cannot be blank.'), Length(min=2, max=30)])\n\n\nclass AddUserForm(UserBaseForm):\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60), Regexp(\n '^(?=.*[A-Za-z])(?=.*\\\\d)(?=.*[$@$!%*#?&])[A-Za-z\\\\d$@$!%*#?&]{8,}$',\n message='Please match the given requirements for password.')],\n description=\n 'Minimum one each - uppercase letter, lowercase letter, number, special character.'\n )\n\n def validate_email(form, field):\n \"\"\"Make sure email not in use.\"\"\"\n if User.query.filter_by(email=form.email.data).first():\n form.email.errors.append('Email already associated with account!')\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username not in use.\"\"\"\n if User.query.filter_by(username=form.username.data).first():\n form.username.errors.append('Username already taken!')\n raise ValidationError\n\n\nclass EditUserForm(UserBaseForm):\n \"\"\"Edit User Form.\"\"\"\n avatar_url = URLField('Avatar Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n banner_url = URLField('Banner Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n byline = StringField('User Byline', validators=[Length(min=2, max=200),\n Optional()], description='A short snippet shown under your username')\n bio = TextAreaField('User Bio', validators=[Length(min=2, max=500),\n Optional()], description='500 character max')\n city = StringField('City', validators=[Length(min=2, max=50), Optional()])\n state = StringField('State', validators=[Length(min=2, max=50), Optional()]\n )\n country = StringField('Country', validators=[Length(min=2, max=50),\n Optional()])\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n user = User.query.filter_by(email=form.email.data).first()\n if user and not user == g.user:\n form.email.errors = ['Email already associated with account!',\n *form.email.errors]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n user = User.query.filter_by(username=form.username.data).first()\n if user and not user == g.user:\n form.username.errors = ['Username already taken!', *form.\n username.errors]\n raise ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n text = TextAreaField('Report', validators=[InputRequired(message=\n 'Report cannot be blank.'), Length(min=2)])\n photo_url = URLField('Photo URL', validators=[URL(), Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\"\n )\n photo_file = FileField('Upload Photo', validators=[Optional()],\n 
description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 4MB max.\"\"\"\n )\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n",
"step-5": "from flask_wtf import FlaskForm\nfrom wtforms import (\n StringField, TextAreaField, PasswordField, HiddenField)\nfrom wtforms.fields.html5 import URLField, EmailField\nfrom flask_wtf.file import FileField\nfrom wtforms.validators import (\n InputRequired, Length, Email,\n Optional, URL, ValidationError, Regexp)\nfrom models import User\nfrom flask import g\n\n\nclass UserBaseForm(FlaskForm):\n email = EmailField(\"Email\", validators=[\n InputRequired(message=\"Email cannot be blank.\"),\n Length(min=5, max=320),\n Email(check_deliverability=True,\n message=\"Invalid Email address\")])\n\n username = StringField(\"Username\", validators=[\n InputRequired(message=\"Username cannot be blank.\"),\n Length(min=2, max=30)])\n\n\nclass AddUserForm(UserBaseForm):\n\n password = PasswordField(\"Password\", validators=[\n InputRequired(message=\"Password cannot be blank.\"),\n Length(min=8, max=60),\n Regexp(\"^(?=.*[A-Za-z])(?=.*\\d)(?=.*[$@$!%*#?&])[A-Za-z\\d$@$!%*#?&]{8,}$\", message='Please match the given requirements for password.')], # noqa e501\n description=\"Minimum one each - uppercase letter, lowercase letter, number, special character.\") # noqa e501\n\n def validate_email(form, field):\n \"\"\"Make sure email not in use.\"\"\"\n if User.query.filter_by(email=form.email.data).first():\n form.email.errors.append(\n \"Email already associated with account!\")\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username not in use.\"\"\"\n if User.query.filter_by(username=form.username.data).first():\n form.username.errors.append(\"Username already taken!\")\n raise ValidationError\n\n\nclass EditUserForm(UserBaseForm):\n \"\"\"Edit User Form.\"\"\"\n\n avatar_url = URLField(\"Avatar Image URL\", validators=[\n Length(min=6, max=255), Optional()],\n description=\"Online image address\")\n\n banner_url = URLField(\"Banner Image URL\", validators=[\n Length(min=6, max=255), Optional()],\n description=\"Online image address\")\n\n byline = StringField(\"User Byline\", validators=[\n Length(min=2, max=200), Optional()],\n description=\"A short snippet shown under your username\")\n\n bio = TextAreaField(\"User Bio\", validators=[\n Length(min=2, max=500), Optional()],\n description=\"500 character max\")\n\n city = StringField(\"City\", validators=[Length(min=2, max=50), Optional()])\n\n state = StringField(\"State\", validators=[\n Length(min=2, max=50), Optional()])\n\n country = StringField(\"Country\", validators=[\n Length(min=2, max=50), Optional()])\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n\n user = User.query.filter_by(email=form.email.data).first()\n\n if user and not user == g.user:\n form.email.errors = [\n \"Email already associated with account!\",\n *form.email.errors\n ]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n\n user = User.query.filter_by(username=form.username.data).first()\n\n if user and not user == g.user:\n form.username.errors = [\n \"Username already taken!\",\n *form.username.errors\n ]\n raise ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField(\"Email\", validators=[\n InputRequired(message=\"Email cannot be blank.\"),\n Length(min=5, max=320),\n Email(check_deliverability=True,\n message=\"Invalid Email address\")])\n\n password = PasswordField(\"Password\", validators=[\n InputRequired(\n message=\"Password 
cannot be blank.\"),\n Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n\n text = TextAreaField(\"Report\", validators=[\n InputRequired(message=\"Report cannot be blank.\"),\n Length(min=2)])\n\n photo_url = URLField(\n \"Photo URL\", validators=[URL(), Optional()],\n description=\"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\")\n\n photo_file = FileField(\n \"Upload Photo\", validators=[Optional()],\n description=\"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 4MB max.\"\"\")\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n\n cleared_file = HiddenField('cleared_file')\n",
"step-ids": [
5,
14,
20,
22,
24
]
}
|
[
5,
14,
20,
22,
24
] |
def search4vowels(word):
""" Return sny vowels founded in a supplied word."""
vowels = set('aeiou')
found = vowels.intersection(set(word))
#return found
    for vowel in found:
        print(vowel)
|
normal
|
{
"blob_id": "8a21a7005fb17cc82759079022b540cf4fd062c5",
"index": 3458,
"step-1": "<mask token>\n",
"step-2": "def search4vowels(word):\n \"\"\" Return sny vowels founded in a supplied word.\"\"\"\n vowels = set('aeiou')\n found = vowels.intersection(set(word))\n for vowels in found:\n print(vowels)\n",
"step-3": "def search4vowels(word):\n \"\"\" Return sny vowels founded in a supplied word.\"\"\"\n vowels = set('aeiou')\n found = vowels.intersection(set(word))\n #return found\n for vowels in found:\n print(vowels)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# The player (attack power) attacks the enemy (HP); the enemy takes damage (loses HP) and may die (play a death animation)
# The enemy attacks the player; the player takes damage (loses HP, screen-shatter effect) and may die (game over)
# class Player:
# def __init__(self,name,hp,atk):
# self.name = name
# self.hp = hp
# self.atk = atk
#
# @property
# def hp(self):
# return self.__hp
# @hp.setter
# def hp(self,value):
# if 0<=value<=100:
# self.__hp = value
# else:
# raise ValueError('血量不在区间内')
#
# @property
# def atk(self):
# return self.__atk
#
# @atk.setter
# def atk(self, value):
# if 0 <= value <= 50:
# self.__atk = value
# else:
# raise ValueError('攻击力不在区间内')
#
#
# class Enemy:
# def __init__(self, e_name, e_hp, e_atk):
# self.e_name = e_name
# self.e_hp = e_hp
# self.e_atk = e_atk
#
# @property
# def e_hp(self):
# return self.__e_hp
#
# @e_hp.setter
# def e_hp(self, value):
# if 0 <= value <= 100:
# self.__e_hp = value
# else:
# raise ValueError('血量不在区间内')
#
# @property
# def e_atk(self):
# return self.__e_atk
#
# @e_atk.setter
# def e_atk(self, value):
# if 0 <= value <= 20:
# self.__e_atk = value
# else:
# raise ValueError('攻击力不在区间内')
#
#
#
# p1 = Player('悟空',100,20)
# e1 = Enemy('妖怪',40,10)
#
# #1. The player (attack power) attacks the enemy (HP); the enemy takes damage (loses HP) and may die (play a death animation)
# print('1.玩家攻击敌人:')
# def p_atk_e():
# count = 0
# while True:
# e1.e_hp -= p1.atk
# count += 1
# if e1.e_hp >0:
# print('玩家攻击%d次,敌人血量减少到%d' %
# (count,e1.e_hp))
# elif e1.e_hp == 0:
# print('玩家攻击%d次,敌人死亡,播放动画' % count)
# break
#
# p_atk_e()
#
# # 2. The enemy attacks the player; the player takes damage (loses HP, screen-shatter effect) and may die (game over)
# print('2.敌人攻击玩家:')
# def e_atk_p():
# count = 0
# while True:
# p1.hp -= e1.e_atk
# count += 1
# if p1.hp >0:
# print('敌人攻击%d次,玩家血量减少到%d' %
# (count,p1.hp))
# elif p1.hp == 0:
# print('敌人攻击%d次,玩家死亡,游戏结束' % count)
# break
# e_atk_p()
# Player class
class Player:
def __init__(self,hp = 100,atk = 100):
self.hp = hp
self.atk = atk
def attack(self,enemy):
print('电脑:玩家攻击敌人')
enemy.damage(self.atk)
def damage(self,value):
print('玩家:我去')
        # the player loses HP
        self.hp -= value
        # the player may die
if self.hp <= 0:
print('敌人:你真菜')
# Enemy class
class Enemy:
def __init__(self,hp = 100,atk = 99):
self.hp = hp
self.atk = atk
def damage(self,value):
print('敌人:啊')
        # the enemy loses HP
        self.hp -= value
        # the enemy may die
if self.hp <= 0:
print('电脑:敌人死亡,播放动画')
def attack(self,player):
print('电脑:敌人攻击玩家')
player.damage(self.atk)
p01 = Player()
e01 = Enemy()
p01.attack(e01)
e01.attack(p01)
e01.attack(p01)
|
normal
|
{
"blob_id": "3065c87f79433e9fbbd2ff45c2915dfd5b1fa7cc",
"index": 8427,
"step-1": "class Player:\n\n def __init__(self, hp=100, atk=100):\n self.hp = hp\n self.atk = atk\n <mask token>\n <mask token>\n\n\nclass Enemy:\n\n def __init__(self, hp=100, atk=99):\n self.hp = hp\n self.atk = atk\n\n def damage(self, value):\n print('敌人:啊')\n self.hp -= value\n if self.hp <= 0:\n print('电脑:敌人死亡,播放动画')\n\n def attack(self, player):\n print('电脑:敌人攻击玩家')\n player.damage(self.atk)\n\n\n<mask token>\n",
"step-2": "class Player:\n\n def __init__(self, hp=100, atk=100):\n self.hp = hp\n self.atk = atk\n\n def attack(self, enemy):\n print('电脑:玩家攻击敌人')\n enemy.damage(self.atk)\n <mask token>\n\n\nclass Enemy:\n\n def __init__(self, hp=100, atk=99):\n self.hp = hp\n self.atk = atk\n\n def damage(self, value):\n print('敌人:啊')\n self.hp -= value\n if self.hp <= 0:\n print('电脑:敌人死亡,播放动画')\n\n def attack(self, player):\n print('电脑:敌人攻击玩家')\n player.damage(self.atk)\n\n\n<mask token>\n",
"step-3": "class Player:\n\n def __init__(self, hp=100, atk=100):\n self.hp = hp\n self.atk = atk\n\n def attack(self, enemy):\n print('电脑:玩家攻击敌人')\n enemy.damage(self.atk)\n\n def damage(self, value):\n print('玩家:我去')\n self.hp -= value\n if self.hp <= 0:\n print('敌人:你真菜')\n\n\nclass Enemy:\n\n def __init__(self, hp=100, atk=99):\n self.hp = hp\n self.atk = atk\n\n def damage(self, value):\n print('敌人:啊')\n self.hp -= value\n if self.hp <= 0:\n print('电脑:敌人死亡,播放动画')\n\n def attack(self, player):\n print('电脑:敌人攻击玩家')\n player.damage(self.atk)\n\n\n<mask token>\n",
"step-4": "class Player:\n\n def __init__(self, hp=100, atk=100):\n self.hp = hp\n self.atk = atk\n\n def attack(self, enemy):\n print('电脑:玩家攻击敌人')\n enemy.damage(self.atk)\n\n def damage(self, value):\n print('玩家:我去')\n self.hp -= value\n if self.hp <= 0:\n print('敌人:你真菜')\n\n\nclass Enemy:\n\n def __init__(self, hp=100, atk=99):\n self.hp = hp\n self.atk = atk\n\n def damage(self, value):\n print('敌人:啊')\n self.hp -= value\n if self.hp <= 0:\n print('电脑:敌人死亡,播放动画')\n\n def attack(self, player):\n print('电脑:敌人攻击玩家')\n player.damage(self.atk)\n\n\n<mask token>\np01.attack(e01)\ne01.attack(p01)\ne01.attack(p01)\n",
"step-5": "# 玩家(攻击力)攻击敌人(血量)敌人受伤(减血)可能死亡(播放动画)\n# 敌人攻击玩家 玩家受伤(减血 碎屏) 可能死亡(游戏结束)\n\n# class Player:\n# def __init__(self,name,hp,atk):\n# self.name = name\n# self.hp = hp\n# self.atk = atk\n#\n# @property\n# def hp(self):\n# return self.__hp\n# @hp.setter\n# def hp(self,value):\n# if 0<=value<=100:\n# self.__hp = value\n# else:\n# raise ValueError('血量不在区间内')\n#\n# @property\n# def atk(self):\n# return self.__atk\n#\n# @atk.setter\n# def atk(self, value):\n# if 0 <= value <= 50:\n# self.__atk = value\n# else:\n# raise ValueError('攻击力不在区间内')\n#\n#\n# class Enemy:\n# def __init__(self, e_name, e_hp, e_atk):\n# self.e_name = e_name\n# self.e_hp = e_hp\n# self.e_atk = e_atk\n#\n# @property\n# def e_hp(self):\n# return self.__e_hp\n#\n# @e_hp.setter\n# def e_hp(self, value):\n# if 0 <= value <= 100:\n# self.__e_hp = value\n# else:\n# raise ValueError('血量不在区间内')\n#\n# @property\n# def e_atk(self):\n# return self.__e_atk\n#\n# @e_atk.setter\n# def e_atk(self, value):\n# if 0 <= value <= 20:\n# self.__e_atk = value\n# else:\n# raise ValueError('攻击力不在区间内')\n#\n#\n#\n# p1 = Player('悟空',100,20)\n# e1 = Enemy('妖怪',40,10)\n#\n# #1.玩家(攻击力)攻击敌人(血量)敌人受伤(减血)可能死亡(播放动画)\n# print('1.玩家攻击敌人:')\n# def p_atk_e():\n# count = 0\n# while True:\n# e1.e_hp -= p1.atk\n# count += 1\n# if e1.e_hp >0:\n# print('玩家攻击%d次,敌人血量减少到%d' %\n# (count,e1.e_hp))\n# elif e1.e_hp == 0:\n# print('玩家攻击%d次,敌人死亡,播放动画' % count)\n# break\n#\n# p_atk_e()\n#\n# # 2.敌人攻击玩家 玩家受伤(减血 碎屏) 可能死亡(游戏结束)\n# print('2.敌人攻击玩家:')\n# def e_atk_p():\n# count = 0\n# while True:\n# p1.hp -= e1.e_atk\n# count += 1\n# if p1.hp >0:\n# print('敌人攻击%d次,玩家血量减少到%d' %\n# (count,p1.hp))\n# elif p1.hp == 0:\n# print('敌人攻击%d次,玩家死亡,游戏结束' % count)\n# break\n# e_atk_p()\n\n\n#玩家类\nclass Player:\n def __init__(self,hp = 100,atk = 100):\n self.hp = hp\n self.atk = atk\n def attack(self,enemy):\n print('电脑:玩家攻击敌人')\n enemy.damage(self.atk)\n def damage(self,value):\n print('玩家:我去')\n #敌人减血\n self.hp -= value\n #可能死亡\n if self.hp <= 0:\n print('敌人:你真菜')\n\n#敌人类\nclass Enemy:\n def __init__(self,hp = 100,atk = 99):\n self.hp = hp\n self.atk = atk\n def damage(self,value):\n print('敌人:啊')\n #玩家减血\n self.hp -= value\n #可能死亡\n if self.hp <= 0:\n print('电脑:敌人死亡,播放动画')\n def attack(self,player):\n print('电脑:敌人攻击玩家')\n player.damage(self.atk)\n\np01 = Player()\ne01 = Enemy()\np01.attack(e01)\ne01.attack(p01)\ne01.attack(p01)\n",
"step-ids": [
6,
7,
8,
9,
11
]
}
|
[
6,
7,
8,
9,
11
] |
with open("input_trees.txt") as file:
map = file.readlines()
map = [ line.strip() for line in map ]
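# Each slope is (right, down): columns stepped right and rows stepped down per move; '#' cells are trees.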
slopes = [(1,1), (3,1), (5,1), (7,1),(1,2)]
total = 1
for slope in slopes:
treeCount = 0
row, column = 0, 0
while row + 1 < len(map):
row += slope[1]
column += slope[0]
space = map[row][column % len(map[row])]
if space == "#":
treeCount += 1
total *= treeCount
print(total)
|
normal
|
{
"blob_id": "685fa78b9c3ec141ce1e9ab568e4ad8a0565d596",
"index": 4285,
"step-1": "<mask token>\n",
"step-2": "with open('input_trees.txt') as file:\n map = file.readlines()\n map = [line.strip() for line in map]\n<mask token>\nfor slope in slopes:\n treeCount = 0\n row, column = 0, 0\n while row + 1 < len(map):\n row += slope[1]\n column += slope[0]\n space = map[row][column % len(map[row])]\n if space == '#':\n treeCount += 1\n total *= treeCount\nprint(total)\n",
"step-3": "with open('input_trees.txt') as file:\n map = file.readlines()\n map = [line.strip() for line in map]\nslopes = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]\ntotal = 1\nfor slope in slopes:\n treeCount = 0\n row, column = 0, 0\n while row + 1 < len(map):\n row += slope[1]\n column += slope[0]\n space = map[row][column % len(map[row])]\n if space == '#':\n treeCount += 1\n total *= treeCount\nprint(total)\n",
"step-4": "with open(\"input_trees.txt\") as file:\n map = file.readlines()\n map = [ line.strip() for line in map ]\n\nslopes = [(1,1), (3,1), (5,1), (7,1),(1,2)]\n\ntotal = 1\n\nfor slope in slopes:\n treeCount = 0\n row, column = 0, 0\n\n while row + 1 < len(map):\n row += slope[1]\n column += slope[0]\n\n space = map[row][column % len(map[row])]\n if space == \"#\":\n treeCount += 1\n\n total *= treeCount\n\nprint(total)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def interpolate_images(baseline, image, alphas):
alphas_x = alphas[:, tf.newaxis, tf.newaxis, tf.newaxis, tf.newaxis]
baseline_x = tf.expand_dims(baseline, axis=0)
input_x = tf.expand_dims(image, axis=0)
delta = input_x - baseline_x
images = baseline_x + alphas_x * delta
return images
def compute_gradients(model, images, target_class):
with tf.GradientTape() as tape:
tape.watch(images)
raw_probs = model(images)
probs = (1 - raw_probs) * (1 - target_class) + raw_probs * target_class
gradients = tape.gradient(probs, images)
return gradients
def integral_approximation(gradients):
grads = (gradients[:-1] + gradients[1:]) / tf.constant(2.0)
return tf.math.reduce_mean(grads, axis=0)
<|reserved_special_token_0|>
def main(gcs_bucket, n_channels=5, dataset_name='b0-tensorfa-dwiqc',
model_dir='b0_tensorfa_dwiqc', dataset_seed=8, target_class=1,
confusion_class='true_pos'):
print('Setting gpu thread mode to gpu_private.')
os.environ['TF_GPU_THREAD_MODE'] = 'gpu_private'
print('Configuring distribution strategy')
use_tpu = False
try:
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu='')
tf.config.experimental_connect_to_cluster(resolver)
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)
use_tpu = True
print('TPU detected.')
print('All devices: ', tf.config.list_logical_devices('TPU'))
except ValueError:
strategy = tf.distribute.MirroredStrategy()
print('GPUs detected.')
print('Number of accelerators: ', strategy.num_replicas_in_sync)
tf.keras.mixed_precision.set_global_policy('mixed_float16')
scope = strategy.scope()
GCS_BASE_PATH = f'gs://{gcs_bucket}/{model_dir}/seed_{dataset_seed}'
GCS_SAVED_MODEL_DIR = op.join(GCS_BASE_PATH, 'saved_model')
GCS_OUTPUT_DIR = op.join(GCS_BASE_PATH, 'integrated_gradients')
fs = gcsfs.GCSFileSystem()
LOCAL_SAVED_MODEL_DIR = 'saved_model'
LOCAL_OUTPUT_DIR = 'output'
os.makedirs(LOCAL_SAVED_MODEL_DIR, exist_ok=True)
os.makedirs(LOCAL_OUTPUT_DIR, exist_ok=True)
fs.get(GCS_SAVED_MODEL_DIR, LOCAL_SAVED_MODEL_DIR, recursive=True)
GCS_DATA_PATH = f'gs://{gcs_bucket}'
GCS_ALLDATA_DIR = op.join(GCS_DATA_PATH, 'tfrecs', dataset_name, 'all-data'
)
if use_tpu:
device_alldata_dir = GCS_ALLDATA_DIR
else:
LOCAL_ALLDATA_DIR = op.join('.', 'tfrecs', dataset_name, 'all-data')
os.makedirs(LOCAL_ALLDATA_DIR, exist_ok=True)
fs.get(GCS_ALLDATA_DIR, LOCAL_ALLDATA_DIR, recursive=True)
device_alldata_dir = LOCAL_ALLDATA_DIR
volume_shape = 128, 128, 128, n_channels
element_spec = tf.TensorSpec(shape=(), dtype=tf.int64, name=None), (tf.
TensorSpec(shape=(1, 128, 128, 128, 5), dtype=tf.float32, name=None
), tf.TensorSpec(shape=(1,), dtype=tf.float32, name=None))
dataset = tf.data.experimental.load(op.join(device_alldata_dir,
confusion_class), element_spec=element_spec)
volumes = [tf.squeeze(tensor[0]) for _, tensor in dataset]
baseline = tf.zeros(shape=volume_shape, dtype=tf.float32)
print('Computing integrated gradients')
with scope:
model = tf.keras.models.load_model(LOCAL_SAVED_MODEL_DIR)
ig_attributions = [integrated_gradients(model=model, baseline=
baseline, image=volume, target_class=target_class, m_steps=128,
batch_size=1) for volume in volumes]
if target_class == 1:
postfix = 'attribution_pass'
else:
postfix = 'attribution_fail'
ig_dataset = tf.data.Dataset.from_tensor_slices(tf.stack(ig_attributions))
tf.data.experimental.save(ig_dataset, op.join(LOCAL_OUTPUT_DIR,
f'ig_{confusion_class}_{postfix}'))
affine = np.diag([1, 1, 1, 1])
volume_niftis = [{'b0': nib.Nifti1Image(volume[:, :, :, 3].numpy(),
affine), 'color_fa': nib.Nifti1Image(volume[:, :, :, :3].numpy(),
affine)} for volume in volumes]
ig_niftis = [{'b0': nib.Nifti1Image(attribution[:, :, :, 3].numpy(),
affine), 'color_fa': nib.Nifti1Image(attribution[:, :, :, :3].numpy
(), affine), 'sum': nib.Nifti1Image(tf.math.reduce_sum(attribution[
:, :, :, :4], axis=-1).numpy(), affine)} for attribution in
ig_attributions]
for idx, (volume_nifti, ig_nifti) in enumerate(zip(volume_niftis,
ig_niftis)):
for key, value in volume_nifti.items():
nib.save(value, op.join(LOCAL_OUTPUT_DIR,
f'{confusion_class}_{key}_{idx}.nii.gz'))
for key, value in ig_nifti.items():
nib.save(value, op.join(LOCAL_OUTPUT_DIR,
f'{confusion_class}_{postfix}_{key}_{idx}.nii.gz'))
fs.put(LOCAL_OUTPUT_DIR, GCS_OUTPUT_DIR, recursive=True)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def interpolate_images(baseline, image, alphas):
alphas_x = alphas[:, tf.newaxis, tf.newaxis, tf.newaxis, tf.newaxis]
baseline_x = tf.expand_dims(baseline, axis=0)
input_x = tf.expand_dims(image, axis=0)
delta = input_x - baseline_x
images = baseline_x + alphas_x * delta
return images
def compute_gradients(model, images, target_class):
with tf.GradientTape() as tape:
tape.watch(images)
raw_probs = model(images)
probs = (1 - raw_probs) * (1 - target_class) + raw_probs * target_class
gradients = tape.gradient(probs, images)
return gradients
def integral_approximation(gradients):
grads = (gradients[:-1] + gradients[1:]) / tf.constant(2.0)
return tf.math.reduce_mean(grads, axis=0)
@tf.function
def integrated_gradients(model, baseline, image, target_class, m_steps=50,
batch_size=32):
alphas = tf.linspace(start=0.0, stop=1.0, num=m_steps + 1)
gradient_batches = tf.TensorArray(tf.float32, size=m_steps + 1)
for alpha in tf.range(0, len(alphas), batch_size):
from_ = alpha
to = tf.minimum(from_ + batch_size, len(alphas))
alpha_batch = alphas[from_:to]
interpolated_path_input_batch = interpolate_images(baseline=
baseline, image=image, alphas=alpha_batch)
gradient_batch = compute_gradients(model=model, images=
interpolated_path_input_batch, target_class=target_class)
gradient_batches = gradient_batches.scatter(tf.range(from_, to),
gradient_batch)
total_gradients = gradient_batches.stack()
avg_gradients = integral_approximation(gradients=total_gradients)
return (image - baseline) * avg_gradients
def main(gcs_bucket, n_channels=5, dataset_name='b0-tensorfa-dwiqc',
model_dir='b0_tensorfa_dwiqc', dataset_seed=8, target_class=1,
confusion_class='true_pos'):
print('Setting gpu thread mode to gpu_private.')
os.environ['TF_GPU_THREAD_MODE'] = 'gpu_private'
print('Configuring distribution strategy')
use_tpu = False
try:
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu='')
tf.config.experimental_connect_to_cluster(resolver)
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)
use_tpu = True
print('TPU detected.')
print('All devices: ', tf.config.list_logical_devices('TPU'))
except ValueError:
strategy = tf.distribute.MirroredStrategy()
print('GPUs detected.')
print('Number of accelerators: ', strategy.num_replicas_in_sync)
tf.keras.mixed_precision.set_global_policy('mixed_float16')
scope = strategy.scope()
GCS_BASE_PATH = f'gs://{gcs_bucket}/{model_dir}/seed_{dataset_seed}'
GCS_SAVED_MODEL_DIR = op.join(GCS_BASE_PATH, 'saved_model')
GCS_OUTPUT_DIR = op.join(GCS_BASE_PATH, 'integrated_gradients')
fs = gcsfs.GCSFileSystem()
LOCAL_SAVED_MODEL_DIR = 'saved_model'
LOCAL_OUTPUT_DIR = 'output'
os.makedirs(LOCAL_SAVED_MODEL_DIR, exist_ok=True)
os.makedirs(LOCAL_OUTPUT_DIR, exist_ok=True)
fs.get(GCS_SAVED_MODEL_DIR, LOCAL_SAVED_MODEL_DIR, recursive=True)
GCS_DATA_PATH = f'gs://{gcs_bucket}'
GCS_ALLDATA_DIR = op.join(GCS_DATA_PATH, 'tfrecs', dataset_name, 'all-data'
)
if use_tpu:
device_alldata_dir = GCS_ALLDATA_DIR
else:
LOCAL_ALLDATA_DIR = op.join('.', 'tfrecs', dataset_name, 'all-data')
os.makedirs(LOCAL_ALLDATA_DIR, exist_ok=True)
fs.get(GCS_ALLDATA_DIR, LOCAL_ALLDATA_DIR, recursive=True)
device_alldata_dir = LOCAL_ALLDATA_DIR
volume_shape = 128, 128, 128, n_channels
element_spec = tf.TensorSpec(shape=(), dtype=tf.int64, name=None), (tf.
TensorSpec(shape=(1, 128, 128, 128, 5), dtype=tf.float32, name=None
), tf.TensorSpec(shape=(1,), dtype=tf.float32, name=None))
dataset = tf.data.experimental.load(op.join(device_alldata_dir,
confusion_class), element_spec=element_spec)
volumes = [tf.squeeze(tensor[0]) for _, tensor in dataset]
baseline = tf.zeros(shape=volume_shape, dtype=tf.float32)
print('Computing integrated gradients')
with scope:
model = tf.keras.models.load_model(LOCAL_SAVED_MODEL_DIR)
ig_attributions = [integrated_gradients(model=model, baseline=
baseline, image=volume, target_class=target_class, m_steps=128,
batch_size=1) for volume in volumes]
if target_class == 1:
postfix = 'attribution_pass'
else:
postfix = 'attribution_fail'
ig_dataset = tf.data.Dataset.from_tensor_slices(tf.stack(ig_attributions))
tf.data.experimental.save(ig_dataset, op.join(LOCAL_OUTPUT_DIR,
f'ig_{confusion_class}_{postfix}'))
affine = np.diag([1, 1, 1, 1])
volume_niftis = [{'b0': nib.Nifti1Image(volume[:, :, :, 3].numpy(),
affine), 'color_fa': nib.Nifti1Image(volume[:, :, :, :3].numpy(),
affine)} for volume in volumes]
ig_niftis = [{'b0': nib.Nifti1Image(attribution[:, :, :, 3].numpy(),
affine), 'color_fa': nib.Nifti1Image(attribution[:, :, :, :3].numpy
(), affine), 'sum': nib.Nifti1Image(tf.math.reduce_sum(attribution[
:, :, :, :4], axis=-1).numpy(), affine)} for attribution in
ig_attributions]
for idx, (volume_nifti, ig_nifti) in enumerate(zip(volume_niftis,
ig_niftis)):
for key, value in volume_nifti.items():
nib.save(value, op.join(LOCAL_OUTPUT_DIR,
f'{confusion_class}_{key}_{idx}.nii.gz'))
for key, value in ig_nifti.items():
nib.save(value, op.join(LOCAL_OUTPUT_DIR,
f'{confusion_class}_{postfix}_{key}_{idx}.nii.gz'))
fs.put(LOCAL_OUTPUT_DIR, GCS_OUTPUT_DIR, recursive=True)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def interpolate_images(baseline, image, alphas):
alphas_x = alphas[:, tf.newaxis, tf.newaxis, tf.newaxis, tf.newaxis]
baseline_x = tf.expand_dims(baseline, axis=0)
input_x = tf.expand_dims(image, axis=0)
delta = input_x - baseline_x
images = baseline_x + alphas_x * delta
return images
def compute_gradients(model, images, target_class):
with tf.GradientTape() as tape:
tape.watch(images)
raw_probs = model(images)
probs = (1 - raw_probs) * (1 - target_class) + raw_probs * target_class
gradients = tape.gradient(probs, images)
return gradients
def integral_approximation(gradients):
grads = (gradients[:-1] + gradients[1:]) / tf.constant(2.0)
return tf.math.reduce_mean(grads, axis=0)
@tf.function
def integrated_gradients(model, baseline, image, target_class, m_steps=50,
batch_size=32):
alphas = tf.linspace(start=0.0, stop=1.0, num=m_steps + 1)
gradient_batches = tf.TensorArray(tf.float32, size=m_steps + 1)
for alpha in tf.range(0, len(alphas), batch_size):
from_ = alpha
to = tf.minimum(from_ + batch_size, len(alphas))
alpha_batch = alphas[from_:to]
interpolated_path_input_batch = interpolate_images(baseline=
baseline, image=image, alphas=alpha_batch)
gradient_batch = compute_gradients(model=model, images=
interpolated_path_input_batch, target_class=target_class)
gradient_batches = gradient_batches.scatter(tf.range(from_, to),
gradient_batch)
total_gradients = gradient_batches.stack()
avg_gradients = integral_approximation(gradients=total_gradients)
return (image - baseline) * avg_gradients
def main(gcs_bucket, n_channels=5, dataset_name='b0-tensorfa-dwiqc',
model_dir='b0_tensorfa_dwiqc', dataset_seed=8, target_class=1,
confusion_class='true_pos'):
print('Setting gpu thread mode to gpu_private.')
os.environ['TF_GPU_THREAD_MODE'] = 'gpu_private'
print('Configuring distribution strategy')
use_tpu = False
try:
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu='')
tf.config.experimental_connect_to_cluster(resolver)
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)
use_tpu = True
print('TPU detected.')
print('All devices: ', tf.config.list_logical_devices('TPU'))
except ValueError:
strategy = tf.distribute.MirroredStrategy()
print('GPUs detected.')
print('Number of accelerators: ', strategy.num_replicas_in_sync)
tf.keras.mixed_precision.set_global_policy('mixed_float16')
scope = strategy.scope()
GCS_BASE_PATH = f'gs://{gcs_bucket}/{model_dir}/seed_{dataset_seed}'
GCS_SAVED_MODEL_DIR = op.join(GCS_BASE_PATH, 'saved_model')
GCS_OUTPUT_DIR = op.join(GCS_BASE_PATH, 'integrated_gradients')
fs = gcsfs.GCSFileSystem()
LOCAL_SAVED_MODEL_DIR = 'saved_model'
LOCAL_OUTPUT_DIR = 'output'
os.makedirs(LOCAL_SAVED_MODEL_DIR, exist_ok=True)
os.makedirs(LOCAL_OUTPUT_DIR, exist_ok=True)
fs.get(GCS_SAVED_MODEL_DIR, LOCAL_SAVED_MODEL_DIR, recursive=True)
GCS_DATA_PATH = f'gs://{gcs_bucket}'
GCS_ALLDATA_DIR = op.join(GCS_DATA_PATH, 'tfrecs', dataset_name, 'all-data'
)
if use_tpu:
device_alldata_dir = GCS_ALLDATA_DIR
else:
LOCAL_ALLDATA_DIR = op.join('.', 'tfrecs', dataset_name, 'all-data')
os.makedirs(LOCAL_ALLDATA_DIR, exist_ok=True)
fs.get(GCS_ALLDATA_DIR, LOCAL_ALLDATA_DIR, recursive=True)
device_alldata_dir = LOCAL_ALLDATA_DIR
volume_shape = 128, 128, 128, n_channels
element_spec = tf.TensorSpec(shape=(), dtype=tf.int64, name=None), (tf.
TensorSpec(shape=(1, 128, 128, 128, 5), dtype=tf.float32, name=None
), tf.TensorSpec(shape=(1,), dtype=tf.float32, name=None))
dataset = tf.data.experimental.load(op.join(device_alldata_dir,
confusion_class), element_spec=element_spec)
volumes = [tf.squeeze(tensor[0]) for _, tensor in dataset]
baseline = tf.zeros(shape=volume_shape, dtype=tf.float32)
print('Computing integrated gradients')
with scope:
model = tf.keras.models.load_model(LOCAL_SAVED_MODEL_DIR)
ig_attributions = [integrated_gradients(model=model, baseline=
baseline, image=volume, target_class=target_class, m_steps=128,
batch_size=1) for volume in volumes]
if target_class == 1:
postfix = 'attribution_pass'
else:
postfix = 'attribution_fail'
ig_dataset = tf.data.Dataset.from_tensor_slices(tf.stack(ig_attributions))
tf.data.experimental.save(ig_dataset, op.join(LOCAL_OUTPUT_DIR,
f'ig_{confusion_class}_{postfix}'))
affine = np.diag([1, 1, 1, 1])
volume_niftis = [{'b0': nib.Nifti1Image(volume[:, :, :, 3].numpy(),
affine), 'color_fa': nib.Nifti1Image(volume[:, :, :, :3].numpy(),
affine)} for volume in volumes]
ig_niftis = [{'b0': nib.Nifti1Image(attribution[:, :, :, 3].numpy(),
affine), 'color_fa': nib.Nifti1Image(attribution[:, :, :, :3].numpy
(), affine), 'sum': nib.Nifti1Image(tf.math.reduce_sum(attribution[
:, :, :, :4], axis=-1).numpy(), affine)} for attribution in
ig_attributions]
for idx, (volume_nifti, ig_nifti) in enumerate(zip(volume_niftis,
ig_niftis)):
for key, value in volume_nifti.items():
nib.save(value, op.join(LOCAL_OUTPUT_DIR,
f'{confusion_class}_{key}_{idx}.nii.gz'))
for key, value in ig_nifti.items():
nib.save(value, op.join(LOCAL_OUTPUT_DIR,
f'{confusion_class}_{postfix}_{key}_{idx}.nii.gz'))
fs.put(LOCAL_OUTPUT_DIR, GCS_OUTPUT_DIR, recursive=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gcs_bucket', type=str, help=
'The name of the gcs bucket that will contain the saved models, checkpoints, etc.'
)
parser.add_argument('--n_channels', type=int, help=
'The number of channels in the data.', default=5)
parser.add_argument('--dataset_name', type=str, help=
'The name of the dataset in the tfrecs folder of the GCS bucket.',
default='b0-tensorfa-dwiqc')
parser.add_argument('--model_dir', type=str, help=
'The name of the GCS directory in which the tensorflow model is saved.'
, default='b0_tensorfa_dwiqc')
parser.add_argument('--dataset_seed', type=int, help=
'The seed for the dataset', default=8)
parser.add_argument('--target_class', type=int, help=
'The target class for the integrated gradients.', default=1)
parser.add_argument('--confusion_class', type=str, help=
'The confusion class for which to compute integrated gradients',
default='true_pos')
args = parser.parse_args()
main(gcs_bucket=args.gcs_bucket, n_channels=args.n_channels,
dataset_name=args.dataset_name, model_dir=args.model_dir,
dataset_seed=args.dataset_seed, target_class=args.target_class,
confusion_class=args.confusion_class)
<|reserved_special_token_1|>
import argparse
import gc
import gcsfs
import nibabel as nib
import nilearn
import nobrainer
import numpy as np
import os
import os.path as op
import pandas as pd
import tensorflow as tf
def interpolate_images(baseline, image, alphas):
alphas_x = alphas[:, tf.newaxis, tf.newaxis, tf.newaxis, tf.newaxis]
baseline_x = tf.expand_dims(baseline, axis=0)
input_x = tf.expand_dims(image, axis=0)
delta = input_x - baseline_x
images = baseline_x + alphas_x * delta
return images
def compute_gradients(model, images, target_class):
with tf.GradientTape() as tape:
tape.watch(images)
raw_probs = model(images)
probs = (1 - raw_probs) * (1 - target_class) + raw_probs * target_class
gradients = tape.gradient(probs, images)
return gradients
def integral_approximation(gradients):
grads = (gradients[:-1] + gradients[1:]) / tf.constant(2.0)
return tf.math.reduce_mean(grads, axis=0)
@tf.function
def integrated_gradients(model, baseline, image, target_class, m_steps=50,
batch_size=32):
alphas = tf.linspace(start=0.0, stop=1.0, num=m_steps + 1)
gradient_batches = tf.TensorArray(tf.float32, size=m_steps + 1)
for alpha in tf.range(0, len(alphas), batch_size):
from_ = alpha
to = tf.minimum(from_ + batch_size, len(alphas))
alpha_batch = alphas[from_:to]
interpolated_path_input_batch = interpolate_images(baseline=
baseline, image=image, alphas=alpha_batch)
gradient_batch = compute_gradients(model=model, images=
interpolated_path_input_batch, target_class=target_class)
gradient_batches = gradient_batches.scatter(tf.range(from_, to),
gradient_batch)
total_gradients = gradient_batches.stack()
avg_gradients = integral_approximation(gradients=total_gradients)
return (image - baseline) * avg_gradients
def main(gcs_bucket, n_channels=5, dataset_name='b0-tensorfa-dwiqc',
model_dir='b0_tensorfa_dwiqc', dataset_seed=8, target_class=1,
confusion_class='true_pos'):
print('Setting gpu thread mode to gpu_private.')
os.environ['TF_GPU_THREAD_MODE'] = 'gpu_private'
print('Configuring distribution strategy')
use_tpu = False
try:
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu='')
tf.config.experimental_connect_to_cluster(resolver)
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)
use_tpu = True
print('TPU detected.')
print('All devices: ', tf.config.list_logical_devices('TPU'))
except ValueError:
strategy = tf.distribute.MirroredStrategy()
print('GPUs detected.')
print('Number of accelerators: ', strategy.num_replicas_in_sync)
tf.keras.mixed_precision.set_global_policy('mixed_float16')
scope = strategy.scope()
GCS_BASE_PATH = f'gs://{gcs_bucket}/{model_dir}/seed_{dataset_seed}'
GCS_SAVED_MODEL_DIR = op.join(GCS_BASE_PATH, 'saved_model')
GCS_OUTPUT_DIR = op.join(GCS_BASE_PATH, 'integrated_gradients')
fs = gcsfs.GCSFileSystem()
LOCAL_SAVED_MODEL_DIR = 'saved_model'
LOCAL_OUTPUT_DIR = 'output'
os.makedirs(LOCAL_SAVED_MODEL_DIR, exist_ok=True)
os.makedirs(LOCAL_OUTPUT_DIR, exist_ok=True)
fs.get(GCS_SAVED_MODEL_DIR, LOCAL_SAVED_MODEL_DIR, recursive=True)
GCS_DATA_PATH = f'gs://{gcs_bucket}'
GCS_ALLDATA_DIR = op.join(GCS_DATA_PATH, 'tfrecs', dataset_name, 'all-data'
)
if use_tpu:
device_alldata_dir = GCS_ALLDATA_DIR
else:
LOCAL_ALLDATA_DIR = op.join('.', 'tfrecs', dataset_name, 'all-data')
os.makedirs(LOCAL_ALLDATA_DIR, exist_ok=True)
fs.get(GCS_ALLDATA_DIR, LOCAL_ALLDATA_DIR, recursive=True)
device_alldata_dir = LOCAL_ALLDATA_DIR
volume_shape = 128, 128, 128, n_channels
element_spec = tf.TensorSpec(shape=(), dtype=tf.int64, name=None), (tf.
TensorSpec(shape=(1, 128, 128, 128, 5), dtype=tf.float32, name=None
), tf.TensorSpec(shape=(1,), dtype=tf.float32, name=None))
dataset = tf.data.experimental.load(op.join(device_alldata_dir,
confusion_class), element_spec=element_spec)
volumes = [tf.squeeze(tensor[0]) for _, tensor in dataset]
baseline = tf.zeros(shape=volume_shape, dtype=tf.float32)
print('Computing integrated gradients')
with scope:
model = tf.keras.models.load_model(LOCAL_SAVED_MODEL_DIR)
ig_attributions = [integrated_gradients(model=model, baseline=
baseline, image=volume, target_class=target_class, m_steps=128,
batch_size=1) for volume in volumes]
if target_class == 1:
postfix = 'attribution_pass'
else:
postfix = 'attribution_fail'
ig_dataset = tf.data.Dataset.from_tensor_slices(tf.stack(ig_attributions))
tf.data.experimental.save(ig_dataset, op.join(LOCAL_OUTPUT_DIR,
f'ig_{confusion_class}_{postfix}'))
affine = np.diag([1, 1, 1, 1])
volume_niftis = [{'b0': nib.Nifti1Image(volume[:, :, :, 3].numpy(),
affine), 'color_fa': nib.Nifti1Image(volume[:, :, :, :3].numpy(),
affine)} for volume in volumes]
ig_niftis = [{'b0': nib.Nifti1Image(attribution[:, :, :, 3].numpy(),
affine), 'color_fa': nib.Nifti1Image(attribution[:, :, :, :3].numpy
(), affine), 'sum': nib.Nifti1Image(tf.math.reduce_sum(attribution[
:, :, :, :4], axis=-1).numpy(), affine)} for attribution in
ig_attributions]
for idx, (volume_nifti, ig_nifti) in enumerate(zip(volume_niftis,
ig_niftis)):
for key, value in volume_nifti.items():
nib.save(value, op.join(LOCAL_OUTPUT_DIR,
f'{confusion_class}_{key}_{idx}.nii.gz'))
for key, value in ig_nifti.items():
nib.save(value, op.join(LOCAL_OUTPUT_DIR,
f'{confusion_class}_{postfix}_{key}_{idx}.nii.gz'))
fs.put(LOCAL_OUTPUT_DIR, GCS_OUTPUT_DIR, recursive=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gcs_bucket', type=str, help=
'The name of the gcs bucket that will contain the saved models, checkpoints, etc.'
)
parser.add_argument('--n_channels', type=int, help=
'The number of channels in the data.', default=5)
parser.add_argument('--dataset_name', type=str, help=
'The name of the dataset in the tfrecs folder of the GCS bucket.',
default='b0-tensorfa-dwiqc')
parser.add_argument('--model_dir', type=str, help=
'The name of the GCS directory in which the tensorflow model is saved.'
, default='b0_tensorfa_dwiqc')
parser.add_argument('--dataset_seed', type=int, help=
'The seed for the dataset', default=8)
parser.add_argument('--target_class', type=int, help=
'The target class for the integrated gradients.', default=1)
parser.add_argument('--confusion_class', type=str, help=
'The confusion class for which to compute integrated gradients',
default='true_pos')
args = parser.parse_args()
main(gcs_bucket=args.gcs_bucket, n_channels=args.n_channels,
dataset_name=args.dataset_name, model_dir=args.model_dir,
dataset_seed=args.dataset_seed, target_class=args.target_class,
confusion_class=args.confusion_class)
<|reserved_special_token_1|>
import argparse
import gc
import gcsfs
import nibabel as nib
import nilearn
import nobrainer
import numpy as np
import os
import os.path as op
import pandas as pd
import tensorflow as tf
def interpolate_images(baseline, image, alphas):
alphas_x = alphas[:, tf.newaxis, tf.newaxis, tf.newaxis, tf.newaxis]
baseline_x = tf.expand_dims(baseline, axis=0)
input_x = tf.expand_dims(image, axis=0)
delta = input_x - baseline_x
images = baseline_x + alphas_x * delta
return images
def compute_gradients(model, images, target_class):
with tf.GradientTape() as tape:
tape.watch(images)
raw_probs = model(images)
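        # The model outputs the probability of class 1; flip it when target_class is 0 so probs tracks the requested class.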
probs = (1 - raw_probs) * (1 - target_class) + raw_probs * target_class
gradients = tape.gradient(probs, images)
return gradients
def integral_approximation(gradients):
# riemann_trapezoidal
grads = (gradients[:-1] + gradients[1:]) / tf.constant(2.0)
return tf.math.reduce_mean(grads, axis=0)
@tf.function
def integrated_gradients(
model, baseline, image, target_class, m_steps=50, batch_size=32
):
# 1. Generate alphas.
alphas = tf.linspace(start=0.0, stop=1.0, num=m_steps + 1)
# Initialize TensorArray outside loop to collect gradients.
gradient_batches = tf.TensorArray(tf.float32, size=m_steps + 1)
# Iterate alphas range and batch computation for speed, memory efficiency, and scaling to larger m_steps.
for alpha in tf.range(0, len(alphas), batch_size):
from_ = alpha
to = tf.minimum(from_ + batch_size, len(alphas))
alpha_batch = alphas[from_:to]
# 2. Generate interpolated inputs between baseline and input.
interpolated_path_input_batch = interpolate_images(
baseline=baseline, image=image, alphas=alpha_batch
)
# 3. Compute gradients between model outputs and interpolated inputs.
gradient_batch = compute_gradients(
model=model,
images=interpolated_path_input_batch,
target_class=target_class,
)
# Write batch indices and gradients to extend TensorArray.
gradient_batches = gradient_batches.scatter(tf.range(from_, to), gradient_batch)
# Stack path gradients together row-wise into single tensor.
total_gradients = gradient_batches.stack()
# 4. Integral approximation through averaging gradients.
avg_gradients = integral_approximation(gradients=total_gradients)
# 5. Scale integrated gradients with respect to input.
return (image - baseline) * avg_gradients
def main(
gcs_bucket,
n_channels=5,
dataset_name="b0-tensorfa-dwiqc",
model_dir="b0_tensorfa_dwiqc",
dataset_seed=8,
target_class=1,
confusion_class="true_pos",
):
print("Setting gpu thread mode to gpu_private.")
os.environ["TF_GPU_THREAD_MODE"] = "gpu_private"
print("Configuring distribution strategy")
use_tpu = False
try:
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu="")
tf.config.experimental_connect_to_cluster(resolver)
# This is the TPU initialization code that has to be at the beginning.
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)
use_tpu = True
print("TPU detected.")
print("All devices: ", tf.config.list_logical_devices("TPU"))
except ValueError:
strategy = tf.distribute.MirroredStrategy()
print("GPUs detected.")
print("Number of accelerators: ", strategy.num_replicas_in_sync)
# Train using mixed-precision policy
tf.keras.mixed_precision.set_global_policy("mixed_float16")
scope = strategy.scope()
# Setting location were training logs and checkpoints will be stored
GCS_BASE_PATH = f"gs://{gcs_bucket}/{model_dir}/seed_{dataset_seed}"
GCS_SAVED_MODEL_DIR = op.join(GCS_BASE_PATH, "saved_model")
GCS_OUTPUT_DIR = op.join(GCS_BASE_PATH, "integrated_gradients")
fs = gcsfs.GCSFileSystem()
LOCAL_SAVED_MODEL_DIR = "saved_model"
LOCAL_OUTPUT_DIR = "output"
os.makedirs(LOCAL_SAVED_MODEL_DIR, exist_ok=True)
os.makedirs(LOCAL_OUTPUT_DIR, exist_ok=True)
fs.get(GCS_SAVED_MODEL_DIR, LOCAL_SAVED_MODEL_DIR, recursive=True)
# Specify the datasets on GCP storage
GCS_DATA_PATH = f"gs://{gcs_bucket}"
GCS_ALLDATA_DIR = op.join(GCS_DATA_PATH, "tfrecs", dataset_name, "all-data")
if use_tpu:
device_alldata_dir = GCS_ALLDATA_DIR
else:
LOCAL_ALLDATA_DIR = op.join(".", "tfrecs", dataset_name, "all-data")
os.makedirs(LOCAL_ALLDATA_DIR, exist_ok=True)
fs.get(GCS_ALLDATA_DIR, LOCAL_ALLDATA_DIR, recursive=True)
device_alldata_dir = LOCAL_ALLDATA_DIR
volume_shape = (128, 128, 128, n_channels)
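    # Each saved element is (scalar int64, ((1, 128, 128, 128, 5) float volume, (1,) float label)); only the volumes are used below.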
element_spec = (
tf.TensorSpec(shape=(), dtype=tf.int64, name=None),
(
tf.TensorSpec(shape=(1, 128, 128, 128, 5), dtype=tf.float32, name=None),
tf.TensorSpec(shape=(1,), dtype=tf.float32, name=None),
),
)
dataset = tf.data.experimental.load(
op.join(device_alldata_dir, confusion_class),
element_spec=element_spec,
)
volumes = [tf.squeeze(tensor[0]) for _, tensor in dataset]
baseline = tf.zeros(shape=volume_shape, dtype=tf.float32)
print("Computing integrated gradients")
with scope:
model = tf.keras.models.load_model(LOCAL_SAVED_MODEL_DIR)
ig_attributions = [
integrated_gradients(
model=model,
baseline=baseline,
image=volume,
target_class=target_class,
m_steps=128,
batch_size=1,
)
for volume in volumes
]
if target_class == 1:
postfix = "attribution_pass"
else:
postfix = "attribution_fail"
ig_dataset = tf.data.Dataset.from_tensor_slices(tf.stack(ig_attributions))
tf.data.experimental.save(
ig_dataset,
op.join(LOCAL_OUTPUT_DIR, f"ig_{confusion_class}_{postfix}"),
)
affine = np.diag([1, 1, 1, 1])
volume_niftis = [
{
"b0": nib.Nifti1Image(volume[:, :, :, 3].numpy(), affine),
"color_fa": nib.Nifti1Image(volume[:, :, :, :3].numpy(), affine),
}
for volume in volumes
]
ig_niftis = [
{
"b0": nib.Nifti1Image(attribution[:, :, :, 3].numpy(), affine),
"color_fa": nib.Nifti1Image(attribution[:, :, :, :3].numpy(), affine),
"sum": nib.Nifti1Image(
tf.math.reduce_sum(attribution[:, :, :, :4], axis=-1).numpy(), affine
),
}
for attribution in ig_attributions
]
for idx, (volume_nifti, ig_nifti) in enumerate(zip(volume_niftis, ig_niftis)):
for key, value in volume_nifti.items():
nib.save(
value,
op.join(LOCAL_OUTPUT_DIR, f"{confusion_class}_{key}_{idx}.nii.gz"),
)
for key, value in ig_nifti.items():
nib.save(
value,
op.join(
LOCAL_OUTPUT_DIR, f"{confusion_class}_{postfix}_{key}_{idx}.nii.gz"
),
)
fs.put(LOCAL_OUTPUT_DIR, GCS_OUTPUT_DIR, recursive=True)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--gcs_bucket",
type=str,
help=(
"The name of the gcs bucket that will contain the saved models, "
"checkpoints, etc."
),
)
parser.add_argument(
"--n_channels",
type=int,
help="The number of channels in the data.",
default=5,
)
parser.add_argument(
"--dataset_name",
type=str,
help="The name of the dataset in the tfrecs folder of the GCS bucket.",
default="b0-tensorfa-dwiqc",
)
parser.add_argument(
"--model_dir",
type=str,
help="The name of the GCS directory in which the tensorflow model is saved.",
default="b0_tensorfa_dwiqc",
)
parser.add_argument(
"--dataset_seed",
type=int,
help="The seed for the dataset",
default=8,
)
parser.add_argument(
"--target_class",
type=int,
help="The target class for the integrated gradients.",
default=1,
)
parser.add_argument(
"--confusion_class",
type=str,
help="The confusion class for which to compute integrated gradients",
default="true_pos",
)
args = parser.parse_args()
main(
gcs_bucket=args.gcs_bucket,
n_channels=args.n_channels,
dataset_name=args.dataset_name,
model_dir=args.model_dir,
dataset_seed=args.dataset_seed,
target_class=args.target_class,
confusion_class=args.confusion_class,
)
|
flexible
|
{
"blob_id": "848e4abcd0b4f118030fc62f1272a19bfce9db4e",
"index": 178,
"step-1": "<mask token>\n\n\ndef interpolate_images(baseline, image, alphas):\n alphas_x = alphas[:, tf.newaxis, tf.newaxis, tf.newaxis, tf.newaxis]\n baseline_x = tf.expand_dims(baseline, axis=0)\n input_x = tf.expand_dims(image, axis=0)\n delta = input_x - baseline_x\n images = baseline_x + alphas_x * delta\n return images\n\n\ndef compute_gradients(model, images, target_class):\n with tf.GradientTape() as tape:\n tape.watch(images)\n raw_probs = model(images)\n probs = (1 - raw_probs) * (1 - target_class) + raw_probs * target_class\n gradients = tape.gradient(probs, images)\n return gradients\n\n\ndef integral_approximation(gradients):\n grads = (gradients[:-1] + gradients[1:]) / tf.constant(2.0)\n return tf.math.reduce_mean(grads, axis=0)\n\n\n<mask token>\n\n\ndef main(gcs_bucket, n_channels=5, dataset_name='b0-tensorfa-dwiqc',\n model_dir='b0_tensorfa_dwiqc', dataset_seed=8, target_class=1,\n confusion_class='true_pos'):\n print('Setting gpu thread mode to gpu_private.')\n os.environ['TF_GPU_THREAD_MODE'] = 'gpu_private'\n print('Configuring distribution strategy')\n use_tpu = False\n try:\n resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu='')\n tf.config.experimental_connect_to_cluster(resolver)\n tf.tpu.experimental.initialize_tpu_system(resolver)\n strategy = tf.distribute.TPUStrategy(resolver)\n use_tpu = True\n print('TPU detected.')\n print('All devices: ', tf.config.list_logical_devices('TPU'))\n except ValueError:\n strategy = tf.distribute.MirroredStrategy()\n print('GPUs detected.')\n print('Number of accelerators: ', strategy.num_replicas_in_sync)\n tf.keras.mixed_precision.set_global_policy('mixed_float16')\n scope = strategy.scope()\n GCS_BASE_PATH = f'gs://{gcs_bucket}/{model_dir}/seed_{dataset_seed}'\n GCS_SAVED_MODEL_DIR = op.join(GCS_BASE_PATH, 'saved_model')\n GCS_OUTPUT_DIR = op.join(GCS_BASE_PATH, 'integrated_gradients')\n fs = gcsfs.GCSFileSystem()\n LOCAL_SAVED_MODEL_DIR = 'saved_model'\n LOCAL_OUTPUT_DIR = 'output'\n os.makedirs(LOCAL_SAVED_MODEL_DIR, exist_ok=True)\n os.makedirs(LOCAL_OUTPUT_DIR, exist_ok=True)\n fs.get(GCS_SAVED_MODEL_DIR, LOCAL_SAVED_MODEL_DIR, recursive=True)\n GCS_DATA_PATH = f'gs://{gcs_bucket}'\n GCS_ALLDATA_DIR = op.join(GCS_DATA_PATH, 'tfrecs', dataset_name, 'all-data'\n )\n if use_tpu:\n device_alldata_dir = GCS_ALLDATA_DIR\n else:\n LOCAL_ALLDATA_DIR = op.join('.', 'tfrecs', dataset_name, 'all-data')\n os.makedirs(LOCAL_ALLDATA_DIR, exist_ok=True)\n fs.get(GCS_ALLDATA_DIR, LOCAL_ALLDATA_DIR, recursive=True)\n device_alldata_dir = LOCAL_ALLDATA_DIR\n volume_shape = 128, 128, 128, n_channels\n element_spec = tf.TensorSpec(shape=(), dtype=tf.int64, name=None), (tf.\n TensorSpec(shape=(1, 128, 128, 128, 5), dtype=tf.float32, name=None\n ), tf.TensorSpec(shape=(1,), dtype=tf.float32, name=None))\n dataset = tf.data.experimental.load(op.join(device_alldata_dir,\n confusion_class), element_spec=element_spec)\n volumes = [tf.squeeze(tensor[0]) for _, tensor in dataset]\n baseline = tf.zeros(shape=volume_shape, dtype=tf.float32)\n print('Computing integrated gradients')\n with scope:\n model = tf.keras.models.load_model(LOCAL_SAVED_MODEL_DIR)\n ig_attributions = [integrated_gradients(model=model, baseline=\n baseline, image=volume, target_class=target_class, m_steps=128,\n batch_size=1) for volume in volumes]\n if target_class == 1:\n postfix = 'attribution_pass'\n else:\n postfix = 'attribution_fail'\n ig_dataset = tf.data.Dataset.from_tensor_slices(tf.stack(ig_attributions))\n tf.data.experimental.save(ig_dataset, 
op.join(LOCAL_OUTPUT_DIR,\n f'ig_{confusion_class}_{postfix}'))\n affine = np.diag([1, 1, 1, 1])\n volume_niftis = [{'b0': nib.Nifti1Image(volume[:, :, :, 3].numpy(),\n affine), 'color_fa': nib.Nifti1Image(volume[:, :, :, :3].numpy(),\n affine)} for volume in volumes]\n ig_niftis = [{'b0': nib.Nifti1Image(attribution[:, :, :, 3].numpy(),\n affine), 'color_fa': nib.Nifti1Image(attribution[:, :, :, :3].numpy\n (), affine), 'sum': nib.Nifti1Image(tf.math.reduce_sum(attribution[\n :, :, :, :4], axis=-1).numpy(), affine)} for attribution in\n ig_attributions]\n for idx, (volume_nifti, ig_nifti) in enumerate(zip(volume_niftis,\n ig_niftis)):\n for key, value in volume_nifti.items():\n nib.save(value, op.join(LOCAL_OUTPUT_DIR,\n f'{confusion_class}_{key}_{idx}.nii.gz'))\n for key, value in ig_nifti.items():\n nib.save(value, op.join(LOCAL_OUTPUT_DIR,\n f'{confusion_class}_{postfix}_{key}_{idx}.nii.gz'))\n fs.put(LOCAL_OUTPUT_DIR, GCS_OUTPUT_DIR, recursive=True)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef interpolate_images(baseline, image, alphas):\n alphas_x = alphas[:, tf.newaxis, tf.newaxis, tf.newaxis, tf.newaxis]\n baseline_x = tf.expand_dims(baseline, axis=0)\n input_x = tf.expand_dims(image, axis=0)\n delta = input_x - baseline_x\n images = baseline_x + alphas_x * delta\n return images\n\n\ndef compute_gradients(model, images, target_class):\n with tf.GradientTape() as tape:\n tape.watch(images)\n raw_probs = model(images)\n probs = (1 - raw_probs) * (1 - target_class) + raw_probs * target_class\n gradients = tape.gradient(probs, images)\n return gradients\n\n\ndef integral_approximation(gradients):\n grads = (gradients[:-1] + gradients[1:]) / tf.constant(2.0)\n return tf.math.reduce_mean(grads, axis=0)\n\n\[email protected]\ndef integrated_gradients(model, baseline, image, target_class, m_steps=50,\n batch_size=32):\n alphas = tf.linspace(start=0.0, stop=1.0, num=m_steps + 1)\n gradient_batches = tf.TensorArray(tf.float32, size=m_steps + 1)\n for alpha in tf.range(0, len(alphas), batch_size):\n from_ = alpha\n to = tf.minimum(from_ + batch_size, len(alphas))\n alpha_batch = alphas[from_:to]\n interpolated_path_input_batch = interpolate_images(baseline=\n baseline, image=image, alphas=alpha_batch)\n gradient_batch = compute_gradients(model=model, images=\n interpolated_path_input_batch, target_class=target_class)\n gradient_batches = gradient_batches.scatter(tf.range(from_, to),\n gradient_batch)\n total_gradients = gradient_batches.stack()\n avg_gradients = integral_approximation(gradients=total_gradients)\n return (image - baseline) * avg_gradients\n\n\ndef main(gcs_bucket, n_channels=5, dataset_name='b0-tensorfa-dwiqc',\n model_dir='b0_tensorfa_dwiqc', dataset_seed=8, target_class=1,\n confusion_class='true_pos'):\n print('Setting gpu thread mode to gpu_private.')\n os.environ['TF_GPU_THREAD_MODE'] = 'gpu_private'\n print('Configuring distribution strategy')\n use_tpu = False\n try:\n resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu='')\n tf.config.experimental_connect_to_cluster(resolver)\n tf.tpu.experimental.initialize_tpu_system(resolver)\n strategy = tf.distribute.TPUStrategy(resolver)\n use_tpu = True\n print('TPU detected.')\n print('All devices: ', tf.config.list_logical_devices('TPU'))\n except ValueError:\n strategy = tf.distribute.MirroredStrategy()\n print('GPUs detected.')\n print('Number of accelerators: ', strategy.num_replicas_in_sync)\n tf.keras.mixed_precision.set_global_policy('mixed_float16')\n scope = strategy.scope()\n GCS_BASE_PATH = f'gs://{gcs_bucket}/{model_dir}/seed_{dataset_seed}'\n GCS_SAVED_MODEL_DIR = op.join(GCS_BASE_PATH, 'saved_model')\n GCS_OUTPUT_DIR = op.join(GCS_BASE_PATH, 'integrated_gradients')\n fs = gcsfs.GCSFileSystem()\n LOCAL_SAVED_MODEL_DIR = 'saved_model'\n LOCAL_OUTPUT_DIR = 'output'\n os.makedirs(LOCAL_SAVED_MODEL_DIR, exist_ok=True)\n os.makedirs(LOCAL_OUTPUT_DIR, exist_ok=True)\n fs.get(GCS_SAVED_MODEL_DIR, LOCAL_SAVED_MODEL_DIR, recursive=True)\n GCS_DATA_PATH = f'gs://{gcs_bucket}'\n GCS_ALLDATA_DIR = op.join(GCS_DATA_PATH, 'tfrecs', dataset_name, 'all-data'\n )\n if use_tpu:\n device_alldata_dir = GCS_ALLDATA_DIR\n else:\n LOCAL_ALLDATA_DIR = op.join('.', 'tfrecs', dataset_name, 'all-data')\n os.makedirs(LOCAL_ALLDATA_DIR, exist_ok=True)\n fs.get(GCS_ALLDATA_DIR, LOCAL_ALLDATA_DIR, recursive=True)\n device_alldata_dir = LOCAL_ALLDATA_DIR\n volume_shape = 128, 128, 128, n_channels\n element_spec = tf.TensorSpec(shape=(), dtype=tf.int64, name=None), (tf.\n TensorSpec(shape=(1, 
128, 128, 128, 5), dtype=tf.float32, name=None\n ), tf.TensorSpec(shape=(1,), dtype=tf.float32, name=None))\n dataset = tf.data.experimental.load(op.join(device_alldata_dir,\n confusion_class), element_spec=element_spec)\n volumes = [tf.squeeze(tensor[0]) for _, tensor in dataset]\n baseline = tf.zeros(shape=volume_shape, dtype=tf.float32)\n print('Computing integrated gradients')\n with scope:\n model = tf.keras.models.load_model(LOCAL_SAVED_MODEL_DIR)\n ig_attributions = [integrated_gradients(model=model, baseline=\n baseline, image=volume, target_class=target_class, m_steps=128,\n batch_size=1) for volume in volumes]\n if target_class == 1:\n postfix = 'attribution_pass'\n else:\n postfix = 'attribution_fail'\n ig_dataset = tf.data.Dataset.from_tensor_slices(tf.stack(ig_attributions))\n tf.data.experimental.save(ig_dataset, op.join(LOCAL_OUTPUT_DIR,\n f'ig_{confusion_class}_{postfix}'))\n affine = np.diag([1, 1, 1, 1])\n volume_niftis = [{'b0': nib.Nifti1Image(volume[:, :, :, 3].numpy(),\n affine), 'color_fa': nib.Nifti1Image(volume[:, :, :, :3].numpy(),\n affine)} for volume in volumes]\n ig_niftis = [{'b0': nib.Nifti1Image(attribution[:, :, :, 3].numpy(),\n affine), 'color_fa': nib.Nifti1Image(attribution[:, :, :, :3].numpy\n (), affine), 'sum': nib.Nifti1Image(tf.math.reduce_sum(attribution[\n :, :, :, :4], axis=-1).numpy(), affine)} for attribution in\n ig_attributions]\n for idx, (volume_nifti, ig_nifti) in enumerate(zip(volume_niftis,\n ig_niftis)):\n for key, value in volume_nifti.items():\n nib.save(value, op.join(LOCAL_OUTPUT_DIR,\n f'{confusion_class}_{key}_{idx}.nii.gz'))\n for key, value in ig_nifti.items():\n nib.save(value, op.join(LOCAL_OUTPUT_DIR,\n f'{confusion_class}_{postfix}_{key}_{idx}.nii.gz'))\n fs.put(LOCAL_OUTPUT_DIR, GCS_OUTPUT_DIR, recursive=True)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef interpolate_images(baseline, image, alphas):\n alphas_x = alphas[:, tf.newaxis, tf.newaxis, tf.newaxis, tf.newaxis]\n baseline_x = tf.expand_dims(baseline, axis=0)\n input_x = tf.expand_dims(image, axis=0)\n delta = input_x - baseline_x\n images = baseline_x + alphas_x * delta\n return images\n\n\ndef compute_gradients(model, images, target_class):\n with tf.GradientTape() as tape:\n tape.watch(images)\n raw_probs = model(images)\n probs = (1 - raw_probs) * (1 - target_class) + raw_probs * target_class\n gradients = tape.gradient(probs, images)\n return gradients\n\n\ndef integral_approximation(gradients):\n grads = (gradients[:-1] + gradients[1:]) / tf.constant(2.0)\n return tf.math.reduce_mean(grads, axis=0)\n\n\[email protected]\ndef integrated_gradients(model, baseline, image, target_class, m_steps=50,\n batch_size=32):\n alphas = tf.linspace(start=0.0, stop=1.0, num=m_steps + 1)\n gradient_batches = tf.TensorArray(tf.float32, size=m_steps + 1)\n for alpha in tf.range(0, len(alphas), batch_size):\n from_ = alpha\n to = tf.minimum(from_ + batch_size, len(alphas))\n alpha_batch = alphas[from_:to]\n interpolated_path_input_batch = interpolate_images(baseline=\n baseline, image=image, alphas=alpha_batch)\n gradient_batch = compute_gradients(model=model, images=\n interpolated_path_input_batch, target_class=target_class)\n gradient_batches = gradient_batches.scatter(tf.range(from_, to),\n gradient_batch)\n total_gradients = gradient_batches.stack()\n avg_gradients = integral_approximation(gradients=total_gradients)\n return (image - baseline) * avg_gradients\n\n\ndef main(gcs_bucket, n_channels=5, dataset_name='b0-tensorfa-dwiqc',\n model_dir='b0_tensorfa_dwiqc', dataset_seed=8, target_class=1,\n confusion_class='true_pos'):\n print('Setting gpu thread mode to gpu_private.')\n os.environ['TF_GPU_THREAD_MODE'] = 'gpu_private'\n print('Configuring distribution strategy')\n use_tpu = False\n try:\n resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu='')\n tf.config.experimental_connect_to_cluster(resolver)\n tf.tpu.experimental.initialize_tpu_system(resolver)\n strategy = tf.distribute.TPUStrategy(resolver)\n use_tpu = True\n print('TPU detected.')\n print('All devices: ', tf.config.list_logical_devices('TPU'))\n except ValueError:\n strategy = tf.distribute.MirroredStrategy()\n print('GPUs detected.')\n print('Number of accelerators: ', strategy.num_replicas_in_sync)\n tf.keras.mixed_precision.set_global_policy('mixed_float16')\n scope = strategy.scope()\n GCS_BASE_PATH = f'gs://{gcs_bucket}/{model_dir}/seed_{dataset_seed}'\n GCS_SAVED_MODEL_DIR = op.join(GCS_BASE_PATH, 'saved_model')\n GCS_OUTPUT_DIR = op.join(GCS_BASE_PATH, 'integrated_gradients')\n fs = gcsfs.GCSFileSystem()\n LOCAL_SAVED_MODEL_DIR = 'saved_model'\n LOCAL_OUTPUT_DIR = 'output'\n os.makedirs(LOCAL_SAVED_MODEL_DIR, exist_ok=True)\n os.makedirs(LOCAL_OUTPUT_DIR, exist_ok=True)\n fs.get(GCS_SAVED_MODEL_DIR, LOCAL_SAVED_MODEL_DIR, recursive=True)\n GCS_DATA_PATH = f'gs://{gcs_bucket}'\n GCS_ALLDATA_DIR = op.join(GCS_DATA_PATH, 'tfrecs', dataset_name, 'all-data'\n )\n if use_tpu:\n device_alldata_dir = GCS_ALLDATA_DIR\n else:\n LOCAL_ALLDATA_DIR = op.join('.', 'tfrecs', dataset_name, 'all-data')\n os.makedirs(LOCAL_ALLDATA_DIR, exist_ok=True)\n fs.get(GCS_ALLDATA_DIR, LOCAL_ALLDATA_DIR, recursive=True)\n device_alldata_dir = LOCAL_ALLDATA_DIR\n volume_shape = 128, 128, 128, n_channels\n element_spec = tf.TensorSpec(shape=(), dtype=tf.int64, name=None), (tf.\n TensorSpec(shape=(1, 
128, 128, 128, 5), dtype=tf.float32, name=None\n ), tf.TensorSpec(shape=(1,), dtype=tf.float32, name=None))\n dataset = tf.data.experimental.load(op.join(device_alldata_dir,\n confusion_class), element_spec=element_spec)\n volumes = [tf.squeeze(tensor[0]) for _, tensor in dataset]\n baseline = tf.zeros(shape=volume_shape, dtype=tf.float32)\n print('Computing integrated gradients')\n with scope:\n model = tf.keras.models.load_model(LOCAL_SAVED_MODEL_DIR)\n ig_attributions = [integrated_gradients(model=model, baseline=\n baseline, image=volume, target_class=target_class, m_steps=128,\n batch_size=1) for volume in volumes]\n if target_class == 1:\n postfix = 'attribution_pass'\n else:\n postfix = 'attribution_fail'\n ig_dataset = tf.data.Dataset.from_tensor_slices(tf.stack(ig_attributions))\n tf.data.experimental.save(ig_dataset, op.join(LOCAL_OUTPUT_DIR,\n f'ig_{confusion_class}_{postfix}'))\n affine = np.diag([1, 1, 1, 1])\n volume_niftis = [{'b0': nib.Nifti1Image(volume[:, :, :, 3].numpy(),\n affine), 'color_fa': nib.Nifti1Image(volume[:, :, :, :3].numpy(),\n affine)} for volume in volumes]\n ig_niftis = [{'b0': nib.Nifti1Image(attribution[:, :, :, 3].numpy(),\n affine), 'color_fa': nib.Nifti1Image(attribution[:, :, :, :3].numpy\n (), affine), 'sum': nib.Nifti1Image(tf.math.reduce_sum(attribution[\n :, :, :, :4], axis=-1).numpy(), affine)} for attribution in\n ig_attributions]\n for idx, (volume_nifti, ig_nifti) in enumerate(zip(volume_niftis,\n ig_niftis)):\n for key, value in volume_nifti.items():\n nib.save(value, op.join(LOCAL_OUTPUT_DIR,\n f'{confusion_class}_{key}_{idx}.nii.gz'))\n for key, value in ig_nifti.items():\n nib.save(value, op.join(LOCAL_OUTPUT_DIR,\n f'{confusion_class}_{postfix}_{key}_{idx}.nii.gz'))\n fs.put(LOCAL_OUTPUT_DIR, GCS_OUTPUT_DIR, recursive=True)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('--gcs_bucket', type=str, help=\n 'The name of the gcs bucket that will contain the saved models, checkpoints, etc.'\n )\n parser.add_argument('--n_channels', type=int, help=\n 'The number of channels in the data.', default=5)\n parser.add_argument('--dataset_name', type=str, help=\n 'The name of the dataset in the tfrecs folder of the GCS bucket.',\n default='b0-tensorfa-dwiqc')\n parser.add_argument('--model_dir', type=str, help=\n 'The name of the GCS directory in which the tensorflow model is saved.'\n , default='b0_tensorfa_dwiqc')\n parser.add_argument('--dataset_seed', type=int, help=\n 'The seed for the dataset', default=8)\n parser.add_argument('--target_class', type=int, help=\n 'The target class for the integrated gradients.', default=1)\n parser.add_argument('--confusion_class', type=str, help=\n 'The confusion class for which to compute integrated gradients',\n default='true_pos')\n args = parser.parse_args()\n main(gcs_bucket=args.gcs_bucket, n_channels=args.n_channels,\n dataset_name=args.dataset_name, model_dir=args.model_dir,\n dataset_seed=args.dataset_seed, target_class=args.target_class,\n confusion_class=args.confusion_class)\n",
"step-4": "import argparse\nimport gc\nimport gcsfs\nimport nibabel as nib\nimport nilearn\nimport nobrainer\nimport numpy as np\nimport os\nimport os.path as op\nimport pandas as pd\nimport tensorflow as tf\n\n\ndef interpolate_images(baseline, image, alphas):\n alphas_x = alphas[:, tf.newaxis, tf.newaxis, tf.newaxis, tf.newaxis]\n baseline_x = tf.expand_dims(baseline, axis=0)\n input_x = tf.expand_dims(image, axis=0)\n delta = input_x - baseline_x\n images = baseline_x + alphas_x * delta\n return images\n\n\ndef compute_gradients(model, images, target_class):\n with tf.GradientTape() as tape:\n tape.watch(images)\n raw_probs = model(images)\n probs = (1 - raw_probs) * (1 - target_class) + raw_probs * target_class\n gradients = tape.gradient(probs, images)\n return gradients\n\n\ndef integral_approximation(gradients):\n grads = (gradients[:-1] + gradients[1:]) / tf.constant(2.0)\n return tf.math.reduce_mean(grads, axis=0)\n\n\[email protected]\ndef integrated_gradients(model, baseline, image, target_class, m_steps=50,\n batch_size=32):\n alphas = tf.linspace(start=0.0, stop=1.0, num=m_steps + 1)\n gradient_batches = tf.TensorArray(tf.float32, size=m_steps + 1)\n for alpha in tf.range(0, len(alphas), batch_size):\n from_ = alpha\n to = tf.minimum(from_ + batch_size, len(alphas))\n alpha_batch = alphas[from_:to]\n interpolated_path_input_batch = interpolate_images(baseline=\n baseline, image=image, alphas=alpha_batch)\n gradient_batch = compute_gradients(model=model, images=\n interpolated_path_input_batch, target_class=target_class)\n gradient_batches = gradient_batches.scatter(tf.range(from_, to),\n gradient_batch)\n total_gradients = gradient_batches.stack()\n avg_gradients = integral_approximation(gradients=total_gradients)\n return (image - baseline) * avg_gradients\n\n\ndef main(gcs_bucket, n_channels=5, dataset_name='b0-tensorfa-dwiqc',\n model_dir='b0_tensorfa_dwiqc', dataset_seed=8, target_class=1,\n confusion_class='true_pos'):\n print('Setting gpu thread mode to gpu_private.')\n os.environ['TF_GPU_THREAD_MODE'] = 'gpu_private'\n print('Configuring distribution strategy')\n use_tpu = False\n try:\n resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu='')\n tf.config.experimental_connect_to_cluster(resolver)\n tf.tpu.experimental.initialize_tpu_system(resolver)\n strategy = tf.distribute.TPUStrategy(resolver)\n use_tpu = True\n print('TPU detected.')\n print('All devices: ', tf.config.list_logical_devices('TPU'))\n except ValueError:\n strategy = tf.distribute.MirroredStrategy()\n print('GPUs detected.')\n print('Number of accelerators: ', strategy.num_replicas_in_sync)\n tf.keras.mixed_precision.set_global_policy('mixed_float16')\n scope = strategy.scope()\n GCS_BASE_PATH = f'gs://{gcs_bucket}/{model_dir}/seed_{dataset_seed}'\n GCS_SAVED_MODEL_DIR = op.join(GCS_BASE_PATH, 'saved_model')\n GCS_OUTPUT_DIR = op.join(GCS_BASE_PATH, 'integrated_gradients')\n fs = gcsfs.GCSFileSystem()\n LOCAL_SAVED_MODEL_DIR = 'saved_model'\n LOCAL_OUTPUT_DIR = 'output'\n os.makedirs(LOCAL_SAVED_MODEL_DIR, exist_ok=True)\n os.makedirs(LOCAL_OUTPUT_DIR, exist_ok=True)\n fs.get(GCS_SAVED_MODEL_DIR, LOCAL_SAVED_MODEL_DIR, recursive=True)\n GCS_DATA_PATH = f'gs://{gcs_bucket}'\n GCS_ALLDATA_DIR = op.join(GCS_DATA_PATH, 'tfrecs', dataset_name, 'all-data'\n )\n if use_tpu:\n device_alldata_dir = GCS_ALLDATA_DIR\n else:\n LOCAL_ALLDATA_DIR = op.join('.', 'tfrecs', dataset_name, 'all-data')\n os.makedirs(LOCAL_ALLDATA_DIR, exist_ok=True)\n fs.get(GCS_ALLDATA_DIR, LOCAL_ALLDATA_DIR, 
recursive=True)\n device_alldata_dir = LOCAL_ALLDATA_DIR\n volume_shape = 128, 128, 128, n_channels\n element_spec = tf.TensorSpec(shape=(), dtype=tf.int64, name=None), (tf.\n TensorSpec(shape=(1, 128, 128, 128, 5), dtype=tf.float32, name=None\n ), tf.TensorSpec(shape=(1,), dtype=tf.float32, name=None))\n dataset = tf.data.experimental.load(op.join(device_alldata_dir,\n confusion_class), element_spec=element_spec)\n volumes = [tf.squeeze(tensor[0]) for _, tensor in dataset]\n baseline = tf.zeros(shape=volume_shape, dtype=tf.float32)\n print('Computing integrated gradients')\n with scope:\n model = tf.keras.models.load_model(LOCAL_SAVED_MODEL_DIR)\n ig_attributions = [integrated_gradients(model=model, baseline=\n baseline, image=volume, target_class=target_class, m_steps=128,\n batch_size=1) for volume in volumes]\n if target_class == 1:\n postfix = 'attribution_pass'\n else:\n postfix = 'attribution_fail'\n ig_dataset = tf.data.Dataset.from_tensor_slices(tf.stack(ig_attributions))\n tf.data.experimental.save(ig_dataset, op.join(LOCAL_OUTPUT_DIR,\n f'ig_{confusion_class}_{postfix}'))\n affine = np.diag([1, 1, 1, 1])\n volume_niftis = [{'b0': nib.Nifti1Image(volume[:, :, :, 3].numpy(),\n affine), 'color_fa': nib.Nifti1Image(volume[:, :, :, :3].numpy(),\n affine)} for volume in volumes]\n ig_niftis = [{'b0': nib.Nifti1Image(attribution[:, :, :, 3].numpy(),\n affine), 'color_fa': nib.Nifti1Image(attribution[:, :, :, :3].numpy\n (), affine), 'sum': nib.Nifti1Image(tf.math.reduce_sum(attribution[\n :, :, :, :4], axis=-1).numpy(), affine)} for attribution in\n ig_attributions]\n for idx, (volume_nifti, ig_nifti) in enumerate(zip(volume_niftis,\n ig_niftis)):\n for key, value in volume_nifti.items():\n nib.save(value, op.join(LOCAL_OUTPUT_DIR,\n f'{confusion_class}_{key}_{idx}.nii.gz'))\n for key, value in ig_nifti.items():\n nib.save(value, op.join(LOCAL_OUTPUT_DIR,\n f'{confusion_class}_{postfix}_{key}_{idx}.nii.gz'))\n fs.put(LOCAL_OUTPUT_DIR, GCS_OUTPUT_DIR, recursive=True)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('--gcs_bucket', type=str, help=\n 'The name of the gcs bucket that will contain the saved models, checkpoints, etc.'\n )\n parser.add_argument('--n_channels', type=int, help=\n 'The number of channels in the data.', default=5)\n parser.add_argument('--dataset_name', type=str, help=\n 'The name of the dataset in the tfrecs folder of the GCS bucket.',\n default='b0-tensorfa-dwiqc')\n parser.add_argument('--model_dir', type=str, help=\n 'The name of the GCS directory in which the tensorflow model is saved.'\n , default='b0_tensorfa_dwiqc')\n parser.add_argument('--dataset_seed', type=int, help=\n 'The seed for the dataset', default=8)\n parser.add_argument('--target_class', type=int, help=\n 'The target class for the integrated gradients.', default=1)\n parser.add_argument('--confusion_class', type=str, help=\n 'The confusion class for which to compute integrated gradients',\n default='true_pos')\n args = parser.parse_args()\n main(gcs_bucket=args.gcs_bucket, n_channels=args.n_channels,\n dataset_name=args.dataset_name, model_dir=args.model_dir,\n dataset_seed=args.dataset_seed, target_class=args.target_class,\n confusion_class=args.confusion_class)\n",
"step-5": "import argparse\nimport gc\nimport gcsfs\nimport nibabel as nib\nimport nilearn\nimport nobrainer\nimport numpy as np\nimport os\nimport os.path as op\nimport pandas as pd\nimport tensorflow as tf\n\n\ndef interpolate_images(baseline, image, alphas):\n alphas_x = alphas[:, tf.newaxis, tf.newaxis, tf.newaxis, tf.newaxis]\n baseline_x = tf.expand_dims(baseline, axis=0)\n input_x = tf.expand_dims(image, axis=0)\n delta = input_x - baseline_x\n images = baseline_x + alphas_x * delta\n return images\n\n\ndef compute_gradients(model, images, target_class):\n with tf.GradientTape() as tape:\n tape.watch(images)\n raw_probs = model(images)\n probs = (1 - raw_probs) * (1 - target_class) + raw_probs * target_class\n\n gradients = tape.gradient(probs, images)\n return gradients\n\n\ndef integral_approximation(gradients):\n # riemann_trapezoidal\n grads = (gradients[:-1] + gradients[1:]) / tf.constant(2.0)\n return tf.math.reduce_mean(grads, axis=0)\n\n\[email protected]\ndef integrated_gradients(\n model, baseline, image, target_class, m_steps=50, batch_size=32\n):\n # 1. Generate alphas.\n alphas = tf.linspace(start=0.0, stop=1.0, num=m_steps + 1)\n\n # Initialize TensorArray outside loop to collect gradients.\n gradient_batches = tf.TensorArray(tf.float32, size=m_steps + 1)\n\n # Iterate alphas range and batch computation for speed, memory efficiency, and scaling to larger m_steps.\n for alpha in tf.range(0, len(alphas), batch_size):\n from_ = alpha\n to = tf.minimum(from_ + batch_size, len(alphas))\n alpha_batch = alphas[from_:to]\n\n # 2. Generate interpolated inputs between baseline and input.\n interpolated_path_input_batch = interpolate_images(\n baseline=baseline, image=image, alphas=alpha_batch\n )\n\n # 3. Compute gradients between model outputs and interpolated inputs.\n gradient_batch = compute_gradients(\n model=model,\n images=interpolated_path_input_batch,\n target_class=target_class,\n )\n\n # Write batch indices and gradients to extend TensorArray.\n gradient_batches = gradient_batches.scatter(tf.range(from_, to), gradient_batch)\n\n # Stack path gradients together row-wise into single tensor.\n total_gradients = gradient_batches.stack()\n\n # 4. Integral approximation through averaging gradients.\n avg_gradients = integral_approximation(gradients=total_gradients)\n\n # 5. 
Scale integrated gradients with respect to input.\n return (image - baseline) * avg_gradients\n\n\ndef main(\n gcs_bucket,\n n_channels=5,\n dataset_name=\"b0-tensorfa-dwiqc\",\n model_dir=\"b0_tensorfa_dwiqc\",\n dataset_seed=8,\n target_class=1,\n confusion_class=\"true_pos\",\n):\n print(\"Setting gpu thread mode to gpu_private.\")\n os.environ[\"TF_GPU_THREAD_MODE\"] = \"gpu_private\"\n\n print(\"Configuring distribution strategy\")\n use_tpu = False\n\n try:\n resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu=\"\")\n tf.config.experimental_connect_to_cluster(resolver)\n # This is the TPU initialization code that has to be at the beginning.\n tf.tpu.experimental.initialize_tpu_system(resolver)\n strategy = tf.distribute.TPUStrategy(resolver)\n\n use_tpu = True\n print(\"TPU detected.\")\n print(\"All devices: \", tf.config.list_logical_devices(\"TPU\"))\n except ValueError:\n strategy = tf.distribute.MirroredStrategy()\n print(\"GPUs detected.\")\n print(\"Number of accelerators: \", strategy.num_replicas_in_sync)\n\n # Train using mixed-precision policy\n tf.keras.mixed_precision.set_global_policy(\"mixed_float16\")\n\n scope = strategy.scope()\n\n # Setting location were training logs and checkpoints will be stored\n GCS_BASE_PATH = f\"gs://{gcs_bucket}/{model_dir}/seed_{dataset_seed}\"\n GCS_SAVED_MODEL_DIR = op.join(GCS_BASE_PATH, \"saved_model\")\n GCS_OUTPUT_DIR = op.join(GCS_BASE_PATH, \"integrated_gradients\")\n\n fs = gcsfs.GCSFileSystem()\n\n LOCAL_SAVED_MODEL_DIR = \"saved_model\"\n LOCAL_OUTPUT_DIR = \"output\"\n os.makedirs(LOCAL_SAVED_MODEL_DIR, exist_ok=True)\n os.makedirs(LOCAL_OUTPUT_DIR, exist_ok=True)\n\n fs.get(GCS_SAVED_MODEL_DIR, LOCAL_SAVED_MODEL_DIR, recursive=True)\n\n # Specify the datasets on GCP storage\n GCS_DATA_PATH = f\"gs://{gcs_bucket}\"\n GCS_ALLDATA_DIR = op.join(GCS_DATA_PATH, \"tfrecs\", dataset_name, \"all-data\")\n\n if use_tpu:\n device_alldata_dir = GCS_ALLDATA_DIR\n else:\n LOCAL_ALLDATA_DIR = op.join(\".\", \"tfrecs\", dataset_name, \"all-data\")\n os.makedirs(LOCAL_ALLDATA_DIR, exist_ok=True)\n fs.get(GCS_ALLDATA_DIR, LOCAL_ALLDATA_DIR, recursive=True)\n device_alldata_dir = LOCAL_ALLDATA_DIR\n\n volume_shape = (128, 128, 128, n_channels)\n element_spec = (\n tf.TensorSpec(shape=(), dtype=tf.int64, name=None),\n (\n tf.TensorSpec(shape=(1, 128, 128, 128, 5), dtype=tf.float32, name=None),\n tf.TensorSpec(shape=(1,), dtype=tf.float32, name=None),\n ),\n )\n\n dataset = tf.data.experimental.load(\n op.join(device_alldata_dir, confusion_class),\n element_spec=element_spec,\n )\n volumes = [tf.squeeze(tensor[0]) for _, tensor in dataset]\n baseline = tf.zeros(shape=volume_shape, dtype=tf.float32)\n\n print(\"Computing integrated gradients\")\n\n with scope:\n model = tf.keras.models.load_model(LOCAL_SAVED_MODEL_DIR)\n\n ig_attributions = [\n integrated_gradients(\n model=model,\n baseline=baseline,\n image=volume,\n target_class=target_class,\n m_steps=128,\n batch_size=1,\n )\n for volume in volumes\n ]\n\n if target_class == 1:\n postfix = \"attribution_pass\"\n else:\n postfix = \"attribution_fail\"\n\n ig_dataset = tf.data.Dataset.from_tensor_slices(tf.stack(ig_attributions))\n tf.data.experimental.save(\n ig_dataset,\n op.join(LOCAL_OUTPUT_DIR, f\"ig_{confusion_class}_{postfix}\"),\n )\n\n affine = np.diag([1, 1, 1, 1])\n volume_niftis = [\n {\n \"b0\": nib.Nifti1Image(volume[:, :, :, 3].numpy(), affine),\n \"color_fa\": nib.Nifti1Image(volume[:, :, :, :3].numpy(), affine),\n }\n for volume in volumes\n ]\n ig_niftis = [\n {\n 
\"b0\": nib.Nifti1Image(attribution[:, :, :, 3].numpy(), affine),\n \"color_fa\": nib.Nifti1Image(attribution[:, :, :, :3].numpy(), affine),\n \"sum\": nib.Nifti1Image(\n tf.math.reduce_sum(attribution[:, :, :, :4], axis=-1).numpy(), affine\n ),\n }\n for attribution in ig_attributions\n ]\n\n for idx, (volume_nifti, ig_nifti) in enumerate(zip(volume_niftis, ig_niftis)):\n for key, value in volume_nifti.items():\n nib.save(\n value,\n op.join(LOCAL_OUTPUT_DIR, f\"{confusion_class}_{key}_{idx}.nii.gz\"),\n )\n\n for key, value in ig_nifti.items():\n nib.save(\n value,\n op.join(\n LOCAL_OUTPUT_DIR, f\"{confusion_class}_{postfix}_{key}_{idx}.nii.gz\"\n ),\n )\n\n fs.put(LOCAL_OUTPUT_DIR, GCS_OUTPUT_DIR, recursive=True)\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument(\n \"--gcs_bucket\",\n type=str,\n help=(\n \"The name of the gcs bucket that will contain the saved models, \"\n \"checkpoints, etc.\"\n ),\n )\n parser.add_argument(\n \"--n_channels\",\n type=int,\n help=\"The number of channels in the data.\",\n default=5,\n )\n parser.add_argument(\n \"--dataset_name\",\n type=str,\n help=\"The name of the dataset in the tfrecs folder of the GCS bucket.\",\n default=\"b0-tensorfa-dwiqc\",\n )\n parser.add_argument(\n \"--model_dir\",\n type=str,\n help=\"The name of the GCS directory in which the tensorflow model is saved.\",\n default=\"b0_tensorfa_dwiqc\",\n )\n parser.add_argument(\n \"--dataset_seed\",\n type=int,\n help=\"The seed for the dataset\",\n default=8,\n )\n parser.add_argument(\n \"--target_class\",\n type=int,\n help=\"The target class for the integrated gradients.\",\n default=1,\n )\n parser.add_argument(\n \"--confusion_class\",\n type=str,\n help=\"The confusion class for which to compute integrated gradients\",\n default=\"true_pos\",\n )\n\n args = parser.parse_args()\n\n main(\n gcs_bucket=args.gcs_bucket,\n n_channels=args.n_channels,\n dataset_name=args.dataset_name,\n model_dir=args.model_dir,\n dataset_seed=args.dataset_seed,\n target_class=args.target_class,\n confusion_class=args.confusion_class,\n )\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if comm.rank == 0:
print('Data = ', data)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
comm = MPI.COMM_WORLD
mydata = comm.rank
data = comm.gather(mydata)
if comm.rank == 0:
print('Data = ', data)
<|reserved_special_token_1|>
from mpi4py import MPI
from random import random
comm = MPI.COMM_WORLD
mydata = comm.rank
data = comm.gather(mydata)
if comm.rank == 0:
print('Data = ', data)
<|reserved_special_token_1|>
from mpi4py import MPI
from random import random
comm = MPI.COMM_WORLD
mydata = comm.rank
data = comm.gather(mydata)
if comm.rank == 0:
print("Data = ", data)
|
flexible
|
{
"blob_id": "acf3d188bd6c99774ddf538dcc83f99ad56c7057",
"index": 7431,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif comm.rank == 0:\n print('Data = ', data)\n",
"step-3": "<mask token>\ncomm = MPI.COMM_WORLD\nmydata = comm.rank\ndata = comm.gather(mydata)\nif comm.rank == 0:\n print('Data = ', data)\n",
"step-4": "from mpi4py import MPI\nfrom random import random\ncomm = MPI.COMM_WORLD\nmydata = comm.rank\ndata = comm.gather(mydata)\nif comm.rank == 0:\n print('Data = ', data)\n",
"step-5": "from mpi4py import MPI\nfrom random import random\n\ncomm = MPI.COMM_WORLD\n\nmydata = comm.rank\n\ndata = comm.gather(mydata)\n\nif comm.rank == 0:\n print(\"Data = \", data)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def index(request):
all_games = game.objects.all()
context = {'all_games': all_games}
return render(request, 'game/index.html', context)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def index(request):
all_games = game.objects.all()
context = {'all_games': all_games}
return render(request, 'game/index.html', context)
def gameview(response):
return HttpResponse('<h1>Ludo King</h1>')
<|reserved_special_token_1|>
from django.http import HttpResponse
from django.shortcuts import render
from .models import game
def index(request):
all_games = game.objects.all()
context = {'all_games': all_games}
return render(request, 'game/index.html', context)
def gameview(response):
return HttpResponse('<h1>Ludo King</h1>')
<|reserved_special_token_1|>
from django.http import HttpResponse
from django.shortcuts import render
from .models import game
def index(request):
all_games = game.objects.all()
context = {
'all_games' : all_games
}
return render(request,'game/index.html',context)
def gameview(response):
return HttpResponse("<h1>Ludo King</h1>")
|
flexible
|
{
"blob_id": "6623ac194e380c9554d72a1b20bf860b958dda97",
"index": 5961,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef index(request):\n all_games = game.objects.all()\n context = {'all_games': all_games}\n return render(request, 'game/index.html', context)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef index(request):\n all_games = game.objects.all()\n context = {'all_games': all_games}\n return render(request, 'game/index.html', context)\n\n\ndef gameview(response):\n return HttpResponse('<h1>Ludo King</h1>')\n",
"step-4": "from django.http import HttpResponse\nfrom django.shortcuts import render\nfrom .models import game\n\n\ndef index(request):\n all_games = game.objects.all()\n context = {'all_games': all_games}\n return render(request, 'game/index.html', context)\n\n\ndef gameview(response):\n return HttpResponse('<h1>Ludo King</h1>')\n",
"step-5": "from django.http import HttpResponse\nfrom django.shortcuts import render\nfrom .models import game\n\ndef index(request):\n all_games = game.objects.all()\n context = {\n 'all_games' : all_games\n }\n return render(request,'game/index.html',context)\n\ndef gameview(response):\n return HttpResponse(\"<h1>Ludo King</h1>\")\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
####################################################################################
# About
# Date: April 12, 2018
# Notes
'''
Code that renames a list of files in a directory
MUST Run in Python 3 environment!
jpeg Drop extra number at the end of unique ID
add DEL or INS based on variant type
'''
'''
Resources
---------
https://gist.github.com/seanh/93666
https://www.youtube.com/watch?v=ve2pmm5JqmI
https://www.youtube.com/watch?v=WQVisBzJGLw
'''
####################################################################################
import os
path2 = '/Volumes/lesleydata/manual_Curation_app/images/svviz_JMZook/1000_Rand_Samp_INS_DEL_2/app_images/DEL/PBDEL'
os.chdir('/Volumes/lesleydata/manual_Curation_app/images/svviz_JMZook/1000_Rand_Samp_INS_DEL_2/app_images/DEL/PBDEL')
for f in os.listdir():
file_name, file_ext = os.path.splitext(f)
file_name = file_name.replace('_DEL', '')
# file_name = file_name.replace(' 1', '')
# file_name = file_name.replace(' 2', '')
# file_name = file_name.replace(' 3', '')
# file_name = file_name.replace(' 4', '')
# file_name = file_name.replace(' 5', '')
# file_name = file_name.replace(' 6', '')
# file_name = file_name.replace(' 7', '')
# file_name = file_name.replace(' 8', '')
# file_name = file_name.replace(' 9', '')
# file_name = file_name.replace(' 10', '')
# file_name = file_name.replace(' 11', '')
# file_name = file_name.replace(' 12', '')
os.rename(os.path.join(path2, f), os.path.join(path2, file_name + '.jpeg'))
|
normal
|
{
"blob_id": "d483314fa7e8a2514fd5089b872b9e480e7454f4",
"index": 8116,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nos.chdir(\n '/Volumes/lesleydata/manual_Curation_app/images/svviz_JMZook/1000_Rand_Samp_INS_DEL_2/app_images/DEL/PBDEL'\n )\nfor f in os.listdir():\n file_name, file_ext = os.path.splitext(f)\n file_name = file_name.replace('_DEL', '')\n os.rename(os.path.join(path2, f), os.path.join(path2, file_name + '.jpeg'))\n",
"step-3": "<mask token>\npath2 = (\n '/Volumes/lesleydata/manual_Curation_app/images/svviz_JMZook/1000_Rand_Samp_INS_DEL_2/app_images/DEL/PBDEL'\n )\nos.chdir(\n '/Volumes/lesleydata/manual_Curation_app/images/svviz_JMZook/1000_Rand_Samp_INS_DEL_2/app_images/DEL/PBDEL'\n )\nfor f in os.listdir():\n file_name, file_ext = os.path.splitext(f)\n file_name = file_name.replace('_DEL', '')\n os.rename(os.path.join(path2, f), os.path.join(path2, file_name + '.jpeg'))\n",
"step-4": "<mask token>\nimport os\npath2 = (\n '/Volumes/lesleydata/manual_Curation_app/images/svviz_JMZook/1000_Rand_Samp_INS_DEL_2/app_images/DEL/PBDEL'\n )\nos.chdir(\n '/Volumes/lesleydata/manual_Curation_app/images/svviz_JMZook/1000_Rand_Samp_INS_DEL_2/app_images/DEL/PBDEL'\n )\nfor f in os.listdir():\n file_name, file_ext = os.path.splitext(f)\n file_name = file_name.replace('_DEL', '')\n os.rename(os.path.join(path2, f), os.path.join(path2, file_name + '.jpeg'))\n",
"step-5": "####################################################################################\n# About\n# Date: April 12, 2018\n# Notes\n'''\nCode that renames a list of files in a directory\nMUST Run in Python 3 environment!\n\njpeg Drop extra number at the end of unique ID\nadd DEL or INS based on variant type\n'''\n\n'''\nResources\n---------\nhttps://gist.github.com/seanh/93666\nhttps://www.youtube.com/watch?v=ve2pmm5JqmI\nhttps://www.youtube.com/watch?v=WQVisBzJGLw\n'''\n\n####################################################################################\n\nimport os\npath2 = '/Volumes/lesleydata/manual_Curation_app/images/svviz_JMZook/1000_Rand_Samp_INS_DEL_2/app_images/DEL/PBDEL'\nos.chdir('/Volumes/lesleydata/manual_Curation_app/images/svviz_JMZook/1000_Rand_Samp_INS_DEL_2/app_images/DEL/PBDEL')\n\nfor f in os.listdir():\n\tfile_name, file_ext = os.path.splitext(f)\n\tfile_name = file_name.replace('_DEL', '')\n\t# file_name = file_name.replace(' 1', '')\n\t# file_name = file_name.replace(' 2', '')\n\t# file_name = file_name.replace(' 3', '')\n\t# file_name = file_name.replace(' 4', '')\n\t# file_name = file_name.replace(' 5', '')\n\t# file_name = file_name.replace(' 6', '')\n\t# file_name = file_name.replace(' 7', '')\n\t# file_name = file_name.replace(' 8', '')\n\t# file_name = file_name.replace(' 9', '')\n\t# file_name = file_name.replace(' 10', '')\n\t# file_name = file_name.replace(' 11', '')\n\t# file_name = file_name.replace(' 12', '')\n\tos.rename(os.path.join(path2, f), os.path.join(path2, file_name + '.jpeg'))\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
def reverse_string(seq):
return seq[::-1]
def complement(seq):
seq = seq.upper()
basecomplement = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}
letters = list(seq)
letters = [basecomplement[base] for base in letters]
return ''.join(letters)
def reversecomplement(seq):
seq = reverse_string(seq)
seq = complement(seq)
return seq
def DNA_To_AA(seq):
RNA_AA_dict = {'TCC': 'S', 'TAC': 'Y', 'AGT': 'S', 'ACG': 'T', 'TAA':
'*', 'TTA': 'L', 'GTC': 'V', 'CAC': 'H', 'CGT': 'R', 'CGG': 'R',
'CTC': 'L', 'AGG': 'R', 'ACA': 'T', 'TCA': 'S', 'CCT': 'P', 'CAG':
'Q', 'ACC': 'T', 'TTC': 'F', 'ATC': 'I', 'AAT': 'N', 'ATA': 'I',
'CAT': 'H', 'GGC': 'G', 'GGG': 'G', 'GCT': 'A', 'GAT': 'D', 'GCA':
'A', 'GCG': 'A', 'GTA': 'V', 'GAC': 'D', 'CTT': 'L', 'CAA': 'Q',
'CCG': 'P', 'AAG': 'K', 'GTT': 'V', 'GGT': 'G', 'TAT': 'Y', 'TGG':
'W', 'AGA': 'R', 'TTT': 'F', 'TAG': '*', 'TGC': 'C', 'GGA': 'G',
'CCA': 'P', 'GCC': 'A', 'CGA': 'R', 'AAA': 'K', 'GTG': 'V', 'CGC':
'R', 'CTG': 'L', 'TCG': 'S', 'TTG': 'L', 'GAA': 'E', 'GAG': 'E',
'TCT': 'S', 'ATT': 'I', 'AAC': 'N', 'ACT': 'T', 'TGT': 'C', 'CTA':
'L', 'ATG': 'M', 'CCC': 'P', 'AGC': 'S', 'TGA': '*'}
F_position = 0
R_position = 0
Aa = ''
for i in range(int(len(seq) / 3)):
F_position = i * 3
R_position = F_position + 3
RNA_one = seq[F_position:R_position]
Aa += RNA_AA_dict[RNA_one]
return Aa
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def reverse_string(seq):
return seq[::-1]
def complement(seq):
seq = seq.upper()
basecomplement = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}
letters = list(seq)
letters = [basecomplement[base] for base in letters]
return ''.join(letters)
def reversecomplement(seq):
seq = reverse_string(seq)
seq = complement(seq)
return seq
def DNA_To_AA(seq):
RNA_AA_dict = {'TCC': 'S', 'TAC': 'Y', 'AGT': 'S', 'ACG': 'T', 'TAA':
'*', 'TTA': 'L', 'GTC': 'V', 'CAC': 'H', 'CGT': 'R', 'CGG': 'R',
'CTC': 'L', 'AGG': 'R', 'ACA': 'T', 'TCA': 'S', 'CCT': 'P', 'CAG':
'Q', 'ACC': 'T', 'TTC': 'F', 'ATC': 'I', 'AAT': 'N', 'ATA': 'I',
'CAT': 'H', 'GGC': 'G', 'GGG': 'G', 'GCT': 'A', 'GAT': 'D', 'GCA':
'A', 'GCG': 'A', 'GTA': 'V', 'GAC': 'D', 'CTT': 'L', 'CAA': 'Q',
'CCG': 'P', 'AAG': 'K', 'GTT': 'V', 'GGT': 'G', 'TAT': 'Y', 'TGG':
'W', 'AGA': 'R', 'TTT': 'F', 'TAG': '*', 'TGC': 'C', 'GGA': 'G',
'CCA': 'P', 'GCC': 'A', 'CGA': 'R', 'AAA': 'K', 'GTG': 'V', 'CGC':
'R', 'CTG': 'L', 'TCG': 'S', 'TTG': 'L', 'GAA': 'E', 'GAG': 'E',
'TCT': 'S', 'ATT': 'I', 'AAC': 'N', 'ACT': 'T', 'TGT': 'C', 'CTA':
'L', 'ATG': 'M', 'CCC': 'P', 'AGC': 'S', 'TGA': '*'}
F_position = 0
R_position = 0
Aa = ''
for i in range(int(len(seq) / 3)):
F_position = i * 3
R_position = F_position + 3
RNA_one = seq[F_position:R_position]
Aa += RNA_AA_dict[RNA_one]
return Aa
def Peptide_Encoding(DNA, AA_input):
AA = DNA_To_AA(DNA)
print(AA)
l = len(AA_input)
return_DNA = []
find_position = 0
while AA_input in AA[find_position:]:
AA_position = find_position + AA[find_position:].find(AA_input)
DNA_position = 3 * AA_position
return_DNA.append(DNA[DNA_position:DNA_position + 3 * l])
find_position = AA_position + 1
return return_DNA
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def reverse_string(seq):
return seq[::-1]
def complement(seq):
seq = seq.upper()
basecomplement = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}
letters = list(seq)
letters = [basecomplement[base] for base in letters]
return ''.join(letters)
def reversecomplement(seq):
seq = reverse_string(seq)
seq = complement(seq)
return seq
def DNA_To_AA(seq):
RNA_AA_dict = {'TCC': 'S', 'TAC': 'Y', 'AGT': 'S', 'ACG': 'T', 'TAA':
'*', 'TTA': 'L', 'GTC': 'V', 'CAC': 'H', 'CGT': 'R', 'CGG': 'R',
'CTC': 'L', 'AGG': 'R', 'ACA': 'T', 'TCA': 'S', 'CCT': 'P', 'CAG':
'Q', 'ACC': 'T', 'TTC': 'F', 'ATC': 'I', 'AAT': 'N', 'ATA': 'I',
'CAT': 'H', 'GGC': 'G', 'GGG': 'G', 'GCT': 'A', 'GAT': 'D', 'GCA':
'A', 'GCG': 'A', 'GTA': 'V', 'GAC': 'D', 'CTT': 'L', 'CAA': 'Q',
'CCG': 'P', 'AAG': 'K', 'GTT': 'V', 'GGT': 'G', 'TAT': 'Y', 'TGG':
'W', 'AGA': 'R', 'TTT': 'F', 'TAG': '*', 'TGC': 'C', 'GGA': 'G',
'CCA': 'P', 'GCC': 'A', 'CGA': 'R', 'AAA': 'K', 'GTG': 'V', 'CGC':
'R', 'CTG': 'L', 'TCG': 'S', 'TTG': 'L', 'GAA': 'E', 'GAG': 'E',
'TCT': 'S', 'ATT': 'I', 'AAC': 'N', 'ACT': 'T', 'TGT': 'C', 'CTA':
'L', 'ATG': 'M', 'CCC': 'P', 'AGC': 'S', 'TGA': '*'}
F_position = 0
R_position = 0
Aa = ''
for i in range(int(len(seq) / 3)):
F_position = i * 3
R_position = F_position + 3
RNA_one = seq[F_position:R_position]
Aa += RNA_AA_dict[RNA_one]
return Aa
def Peptide_Encoding(DNA, AA_input):
AA = DNA_To_AA(DNA)
print(AA)
l = len(AA_input)
return_DNA = []
find_position = 0
while AA_input in AA[find_position:]:
AA_position = find_position + AA[find_position:].find(AA_input)
DNA_position = 3 * AA_position
return_DNA.append(DNA[DNA_position:DNA_position + 3 * l])
find_position = AA_position + 1
return return_DNA
<|reserved_special_token_0|>
for i in fileread:
read = i.strip()
DNA += read.upper()
print(DNA[:200])
<|reserved_special_token_0|>
if print1 != []:
for i in print1:
print('1', i)
<|reserved_special_token_0|>
if print2 != []:
for i in print2:
print('2', i)
<|reserved_special_token_0|>
if print3 != []:
for i in print3:
print('3', i)
<|reserved_special_token_0|>
if print4 != []:
for i in print4:
print('4', reversecomplement(i))
<|reserved_special_token_0|>
if print5 != []:
for i in print5:
print('5', reversecomplement(i))
<|reserved_special_token_0|>
if print6 != []:
for i in print6:
print('6', reversecomplement(i))
<|reserved_special_token_1|>
def reverse_string(seq):
return seq[::-1]
def complement(seq):
seq = seq.upper()
basecomplement = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}
letters = list(seq)
letters = [basecomplement[base] for base in letters]
return ''.join(letters)
def reversecomplement(seq):
seq = reverse_string(seq)
seq = complement(seq)
return seq
def DNA_To_AA(seq):
RNA_AA_dict = {'TCC': 'S', 'TAC': 'Y', 'AGT': 'S', 'ACG': 'T', 'TAA':
'*', 'TTA': 'L', 'GTC': 'V', 'CAC': 'H', 'CGT': 'R', 'CGG': 'R',
'CTC': 'L', 'AGG': 'R', 'ACA': 'T', 'TCA': 'S', 'CCT': 'P', 'CAG':
'Q', 'ACC': 'T', 'TTC': 'F', 'ATC': 'I', 'AAT': 'N', 'ATA': 'I',
'CAT': 'H', 'GGC': 'G', 'GGG': 'G', 'GCT': 'A', 'GAT': 'D', 'GCA':
'A', 'GCG': 'A', 'GTA': 'V', 'GAC': 'D', 'CTT': 'L', 'CAA': 'Q',
'CCG': 'P', 'AAG': 'K', 'GTT': 'V', 'GGT': 'G', 'TAT': 'Y', 'TGG':
'W', 'AGA': 'R', 'TTT': 'F', 'TAG': '*', 'TGC': 'C', 'GGA': 'G',
'CCA': 'P', 'GCC': 'A', 'CGA': 'R', 'AAA': 'K', 'GTG': 'V', 'CGC':
'R', 'CTG': 'L', 'TCG': 'S', 'TTG': 'L', 'GAA': 'E', 'GAG': 'E',
'TCT': 'S', 'ATT': 'I', 'AAC': 'N', 'ACT': 'T', 'TGT': 'C', 'CTA':
'L', 'ATG': 'M', 'CCC': 'P', 'AGC': 'S', 'TGA': '*'}
F_position = 0
R_position = 0
Aa = ''
for i in range(int(len(seq) / 3)):
F_position = i * 3
R_position = F_position + 3
RNA_one = seq[F_position:R_position]
Aa += RNA_AA_dict[RNA_one]
return Aa
def Peptide_Encoding(DNA, AA_input):
AA = DNA_To_AA(DNA)
print(AA)
l = len(AA_input)
return_DNA = []
find_position = 0
while AA_input in AA[find_position:]:
AA_position = find_position + AA[find_position:].find(AA_input)
DNA_position = 3 * AA_position
return_DNA.append(DNA[DNA_position:DNA_position + 3 * l])
find_position = AA_position + 1
return return_DNA
DNA = ''
filename = input('Enter file name: ')
fileread = open(filename, 'r')
for i in fileread:
read = i.strip()
DNA += read.upper()
print(DNA[:200])
F_position = 0
R_position = 0
Aa_input = input('what is the aa?')
DNA_F_1 = DNA
print1 = Peptide_Encoding(DNA_F_1, Aa_input)
if print1 != []:
for i in print1:
print('1', i)
DNA_F_2 = DNA[1:]
print2 = Peptide_Encoding(DNA_F_2, Aa_input)
if print2 != []:
for i in print2:
print('2', i)
DNA_F_3 = DNA[2:]
print3 = Peptide_Encoding(DNA_F_3, Aa_input)
if print3 != []:
for i in print3:
print('3', i)
RC_DNA = reversecomplement(DNA)
DNA_R_1 = RC_DNA
print4 = Peptide_Encoding(DNA_R_1, Aa_input)
if print4 != []:
for i in print4:
print('4', reversecomplement(i))
DNA_R_2 = RC_DNA[1:]
print5 = Peptide_Encoding(DNA_R_2, Aa_input)
if print5 != []:
for i in print5:
print('5', reversecomplement(i))
DNA_R_3 = RC_DNA[2:]
print6 = Peptide_Encoding(DNA_R_3, Aa_input)
if print6 != []:
for i in print6:
print('6', reversecomplement(i))
<|reserved_special_token_1|>
#Peptide Encoding Problem: Find substrings of a genome encoding a given amino acid sequence.
# Input: A DNA string Text, an amino acid string Peptide, and the array GeneticCode.
# Output: All substrings of Text encoding Peptide (if any such substrings exist).
def reverse_string(seq):
return seq[::-1]
def complement(seq):
#return the complementary sequence string.
seq=seq.upper()
basecomplement={"A":"T","C":"G","G":"C","T":"A","N":"N"}
letters=list(seq)
letters=[basecomplement[base] for base in letters]
return ''.join(letters)
def reversecomplement(seq):
#return the reverse complement of the dna string.
seq=reverse_string(seq)
seq=complement(seq)
return seq
def DNA_To_AA(seq):
RNA_AA_dict = {'TCC': 'S', 'TAC': 'Y', 'AGT': 'S', 'ACG': 'T', 'TAA': '*', 'TTA': 'L', 'GTC': 'V', 'CAC': 'H',
'CGT': 'R', 'CGG': 'R', 'CTC': 'L', 'AGG': 'R', 'ACA': 'T', 'TCA': 'S', 'CCT': 'P', 'CAG': 'Q',
'ACC': 'T', 'TTC': 'F', 'ATC': 'I', 'AAT': 'N', 'ATA': 'I', 'CAT': 'H', 'GGC': 'G', 'GGG': 'G',
'GCT': 'A', 'GAT': 'D', 'GCA': 'A', 'GCG': 'A', 'GTA': 'V', 'GAC': 'D', 'CTT': 'L', 'CAA': 'Q',
'CCG': 'P', 'AAG': 'K', 'GTT': 'V', 'GGT': 'G', 'TAT': 'Y', 'TGG': 'W', 'AGA': 'R', 'TTT': 'F',
'TAG': '*', 'TGC': 'C', 'GGA': 'G', 'CCA': 'P', 'GCC': 'A', 'CGA': 'R', 'AAA': 'K', 'GTG': 'V',
'CGC': 'R', 'CTG': 'L', 'TCG': 'S', 'TTG': 'L', 'GAA': 'E', 'GAG': 'E', 'TCT': 'S', 'ATT': 'I',
'AAC': 'N', 'ACT': 'T', 'TGT': 'C', 'CTA': 'L', 'ATG': 'M', 'CCC': 'P', 'AGC': 'S', 'TGA': '*'}
F_position = 0
R_position = 0
Aa=""
for i in range(int(len(seq) / 3)):
F_position = i*3
R_position = F_position+3
RNA_one=seq[F_position:R_position]
#if RNA_one == "TAA" or RNA_one == "TAG" or RNA_one == "TGA":
# break
Aa += RNA_AA_dict[RNA_one]
return Aa
def Peptide_Encoding(DNA,AA_input):
AA= DNA_To_AA(DNA)
print(AA)
l=len(AA_input)
return_DNA=[]
find_position=0
#print(DNA,AA,l,return_DNA,find_position)
while AA_input in AA[find_position:]:
#print(AA[find_position:])
AA_position = find_position + AA[find_position:].find(AA_input)
DNA_position = 3 * AA_position
#print(AA_position, DNA_position)
return_DNA.append(DNA[DNA_position:DNA_position + 3 * l])
find_position = AA_position + 1
#print(find_position)
return return_DNA
DNA=""
filename = input("Enter file name: ")
fileread = open(filename, "r")
for i in fileread:
read = i.strip()
DNA+=read.upper()
print(DNA[:200])
F_position=0
R_position=0
Aa_input=input("what is the aa?")
DNA_F_1=DNA
print1=Peptide_Encoding(DNA_F_1,Aa_input)
if print1!=[]:
for i in print1:
print("1",i)
DNA_F_2=DNA[1:]
print2=Peptide_Encoding(DNA_F_2,Aa_input)
if print2!=[]:
for i in print2:
print("2",i)
DNA_F_3=DNA[2:]
print3=Peptide_Encoding(DNA_F_3,Aa_input)
if print3!=[]:
for i in print3:
print("3",i)
RC_DNA=reversecomplement(DNA)
DNA_R_1=RC_DNA
print4=Peptide_Encoding(DNA_R_1,Aa_input)
if print4!=[]:
for i in print4:
print("4",reversecomplement(i))
DNA_R_2=RC_DNA[1:]
print5=Peptide_Encoding(DNA_R_2,Aa_input)
if print5!=[]:
for i in print5:
print("5",reversecomplement(i))
DNA_R_3=RC_DNA[2:]
print6=Peptide_Encoding(DNA_R_3,Aa_input)
if print6!=[]:
for i in print6:
print("6",reversecomplement(i))
#print(DNA_F_1,DNA_F_2,DNA_F_3,DNA_R_1,DNA_R_2,DNA_R_3)
#print(Aa_F_1,Aa_F_2,Aa_F_3,Aa_R_1,Aa_R_2,Aa_R_3)
#Search the genome for sequences encoding the given AA (amino acid) sequence
#Bacillus brevis.txt
#VKLFPWFNQY
|
flexible
|
{
"blob_id": "0f2d215a34758f85a29ef7ed8264fccd5e85b66f",
"index": 3017,
"step-1": "def reverse_string(seq):\n return seq[::-1]\n\n\ndef complement(seq):\n seq = seq.upper()\n basecomplement = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}\n letters = list(seq)\n letters = [basecomplement[base] for base in letters]\n return ''.join(letters)\n\n\ndef reversecomplement(seq):\n seq = reverse_string(seq)\n seq = complement(seq)\n return seq\n\n\ndef DNA_To_AA(seq):\n RNA_AA_dict = {'TCC': 'S', 'TAC': 'Y', 'AGT': 'S', 'ACG': 'T', 'TAA':\n '*', 'TTA': 'L', 'GTC': 'V', 'CAC': 'H', 'CGT': 'R', 'CGG': 'R',\n 'CTC': 'L', 'AGG': 'R', 'ACA': 'T', 'TCA': 'S', 'CCT': 'P', 'CAG':\n 'Q', 'ACC': 'T', 'TTC': 'F', 'ATC': 'I', 'AAT': 'N', 'ATA': 'I',\n 'CAT': 'H', 'GGC': 'G', 'GGG': 'G', 'GCT': 'A', 'GAT': 'D', 'GCA':\n 'A', 'GCG': 'A', 'GTA': 'V', 'GAC': 'D', 'CTT': 'L', 'CAA': 'Q',\n 'CCG': 'P', 'AAG': 'K', 'GTT': 'V', 'GGT': 'G', 'TAT': 'Y', 'TGG':\n 'W', 'AGA': 'R', 'TTT': 'F', 'TAG': '*', 'TGC': 'C', 'GGA': 'G',\n 'CCA': 'P', 'GCC': 'A', 'CGA': 'R', 'AAA': 'K', 'GTG': 'V', 'CGC':\n 'R', 'CTG': 'L', 'TCG': 'S', 'TTG': 'L', 'GAA': 'E', 'GAG': 'E',\n 'TCT': 'S', 'ATT': 'I', 'AAC': 'N', 'ACT': 'T', 'TGT': 'C', 'CTA':\n 'L', 'ATG': 'M', 'CCC': 'P', 'AGC': 'S', 'TGA': '*'}\n F_position = 0\n R_position = 0\n Aa = ''\n for i in range(int(len(seq) / 3)):\n F_position = i * 3\n R_position = F_position + 3\n RNA_one = seq[F_position:R_position]\n Aa += RNA_AA_dict[RNA_one]\n return Aa\n\n\n<mask token>\n",
"step-2": "def reverse_string(seq):\n return seq[::-1]\n\n\ndef complement(seq):\n seq = seq.upper()\n basecomplement = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}\n letters = list(seq)\n letters = [basecomplement[base] for base in letters]\n return ''.join(letters)\n\n\ndef reversecomplement(seq):\n seq = reverse_string(seq)\n seq = complement(seq)\n return seq\n\n\ndef DNA_To_AA(seq):\n RNA_AA_dict = {'TCC': 'S', 'TAC': 'Y', 'AGT': 'S', 'ACG': 'T', 'TAA':\n '*', 'TTA': 'L', 'GTC': 'V', 'CAC': 'H', 'CGT': 'R', 'CGG': 'R',\n 'CTC': 'L', 'AGG': 'R', 'ACA': 'T', 'TCA': 'S', 'CCT': 'P', 'CAG':\n 'Q', 'ACC': 'T', 'TTC': 'F', 'ATC': 'I', 'AAT': 'N', 'ATA': 'I',\n 'CAT': 'H', 'GGC': 'G', 'GGG': 'G', 'GCT': 'A', 'GAT': 'D', 'GCA':\n 'A', 'GCG': 'A', 'GTA': 'V', 'GAC': 'D', 'CTT': 'L', 'CAA': 'Q',\n 'CCG': 'P', 'AAG': 'K', 'GTT': 'V', 'GGT': 'G', 'TAT': 'Y', 'TGG':\n 'W', 'AGA': 'R', 'TTT': 'F', 'TAG': '*', 'TGC': 'C', 'GGA': 'G',\n 'CCA': 'P', 'GCC': 'A', 'CGA': 'R', 'AAA': 'K', 'GTG': 'V', 'CGC':\n 'R', 'CTG': 'L', 'TCG': 'S', 'TTG': 'L', 'GAA': 'E', 'GAG': 'E',\n 'TCT': 'S', 'ATT': 'I', 'AAC': 'N', 'ACT': 'T', 'TGT': 'C', 'CTA':\n 'L', 'ATG': 'M', 'CCC': 'P', 'AGC': 'S', 'TGA': '*'}\n F_position = 0\n R_position = 0\n Aa = ''\n for i in range(int(len(seq) / 3)):\n F_position = i * 3\n R_position = F_position + 3\n RNA_one = seq[F_position:R_position]\n Aa += RNA_AA_dict[RNA_one]\n return Aa\n\n\ndef Peptide_Encoding(DNA, AA_input):\n AA = DNA_To_AA(DNA)\n print(AA)\n l = len(AA_input)\n return_DNA = []\n find_position = 0\n while AA_input in AA[find_position:]:\n AA_position = find_position + AA[find_position:].find(AA_input)\n DNA_position = 3 * AA_position\n return_DNA.append(DNA[DNA_position:DNA_position + 3 * l])\n find_position = AA_position + 1\n return return_DNA\n\n\n<mask token>\n",
"step-3": "def reverse_string(seq):\n return seq[::-1]\n\n\ndef complement(seq):\n seq = seq.upper()\n basecomplement = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}\n letters = list(seq)\n letters = [basecomplement[base] for base in letters]\n return ''.join(letters)\n\n\ndef reversecomplement(seq):\n seq = reverse_string(seq)\n seq = complement(seq)\n return seq\n\n\ndef DNA_To_AA(seq):\n RNA_AA_dict = {'TCC': 'S', 'TAC': 'Y', 'AGT': 'S', 'ACG': 'T', 'TAA':\n '*', 'TTA': 'L', 'GTC': 'V', 'CAC': 'H', 'CGT': 'R', 'CGG': 'R',\n 'CTC': 'L', 'AGG': 'R', 'ACA': 'T', 'TCA': 'S', 'CCT': 'P', 'CAG':\n 'Q', 'ACC': 'T', 'TTC': 'F', 'ATC': 'I', 'AAT': 'N', 'ATA': 'I',\n 'CAT': 'H', 'GGC': 'G', 'GGG': 'G', 'GCT': 'A', 'GAT': 'D', 'GCA':\n 'A', 'GCG': 'A', 'GTA': 'V', 'GAC': 'D', 'CTT': 'L', 'CAA': 'Q',\n 'CCG': 'P', 'AAG': 'K', 'GTT': 'V', 'GGT': 'G', 'TAT': 'Y', 'TGG':\n 'W', 'AGA': 'R', 'TTT': 'F', 'TAG': '*', 'TGC': 'C', 'GGA': 'G',\n 'CCA': 'P', 'GCC': 'A', 'CGA': 'R', 'AAA': 'K', 'GTG': 'V', 'CGC':\n 'R', 'CTG': 'L', 'TCG': 'S', 'TTG': 'L', 'GAA': 'E', 'GAG': 'E',\n 'TCT': 'S', 'ATT': 'I', 'AAC': 'N', 'ACT': 'T', 'TGT': 'C', 'CTA':\n 'L', 'ATG': 'M', 'CCC': 'P', 'AGC': 'S', 'TGA': '*'}\n F_position = 0\n R_position = 0\n Aa = ''\n for i in range(int(len(seq) / 3)):\n F_position = i * 3\n R_position = F_position + 3\n RNA_one = seq[F_position:R_position]\n Aa += RNA_AA_dict[RNA_one]\n return Aa\n\n\ndef Peptide_Encoding(DNA, AA_input):\n AA = DNA_To_AA(DNA)\n print(AA)\n l = len(AA_input)\n return_DNA = []\n find_position = 0\n while AA_input in AA[find_position:]:\n AA_position = find_position + AA[find_position:].find(AA_input)\n DNA_position = 3 * AA_position\n return_DNA.append(DNA[DNA_position:DNA_position + 3 * l])\n find_position = AA_position + 1\n return return_DNA\n\n\n<mask token>\nfor i in fileread:\n read = i.strip()\n DNA += read.upper()\nprint(DNA[:200])\n<mask token>\nif print1 != []:\n for i in print1:\n print('1', i)\n<mask token>\nif print2 != []:\n for i in print2:\n print('2', i)\n<mask token>\nif print3 != []:\n for i in print3:\n print('3', i)\n<mask token>\nif print4 != []:\n for i in print4:\n print('4', reversecomplement(i))\n<mask token>\nif print5 != []:\n for i in print5:\n print('5', reversecomplement(i))\n<mask token>\nif print6 != []:\n for i in print6:\n print('6', reversecomplement(i))\n",
"step-4": "def reverse_string(seq):\n return seq[::-1]\n\n\ndef complement(seq):\n seq = seq.upper()\n basecomplement = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}\n letters = list(seq)\n letters = [basecomplement[base] for base in letters]\n return ''.join(letters)\n\n\ndef reversecomplement(seq):\n seq = reverse_string(seq)\n seq = complement(seq)\n return seq\n\n\ndef DNA_To_AA(seq):\n RNA_AA_dict = {'TCC': 'S', 'TAC': 'Y', 'AGT': 'S', 'ACG': 'T', 'TAA':\n '*', 'TTA': 'L', 'GTC': 'V', 'CAC': 'H', 'CGT': 'R', 'CGG': 'R',\n 'CTC': 'L', 'AGG': 'R', 'ACA': 'T', 'TCA': 'S', 'CCT': 'P', 'CAG':\n 'Q', 'ACC': 'T', 'TTC': 'F', 'ATC': 'I', 'AAT': 'N', 'ATA': 'I',\n 'CAT': 'H', 'GGC': 'G', 'GGG': 'G', 'GCT': 'A', 'GAT': 'D', 'GCA':\n 'A', 'GCG': 'A', 'GTA': 'V', 'GAC': 'D', 'CTT': 'L', 'CAA': 'Q',\n 'CCG': 'P', 'AAG': 'K', 'GTT': 'V', 'GGT': 'G', 'TAT': 'Y', 'TGG':\n 'W', 'AGA': 'R', 'TTT': 'F', 'TAG': '*', 'TGC': 'C', 'GGA': 'G',\n 'CCA': 'P', 'GCC': 'A', 'CGA': 'R', 'AAA': 'K', 'GTG': 'V', 'CGC':\n 'R', 'CTG': 'L', 'TCG': 'S', 'TTG': 'L', 'GAA': 'E', 'GAG': 'E',\n 'TCT': 'S', 'ATT': 'I', 'AAC': 'N', 'ACT': 'T', 'TGT': 'C', 'CTA':\n 'L', 'ATG': 'M', 'CCC': 'P', 'AGC': 'S', 'TGA': '*'}\n F_position = 0\n R_position = 0\n Aa = ''\n for i in range(int(len(seq) / 3)):\n F_position = i * 3\n R_position = F_position + 3\n RNA_one = seq[F_position:R_position]\n Aa += RNA_AA_dict[RNA_one]\n return Aa\n\n\ndef Peptide_Encoding(DNA, AA_input):\n AA = DNA_To_AA(DNA)\n print(AA)\n l = len(AA_input)\n return_DNA = []\n find_position = 0\n while AA_input in AA[find_position:]:\n AA_position = find_position + AA[find_position:].find(AA_input)\n DNA_position = 3 * AA_position\n return_DNA.append(DNA[DNA_position:DNA_position + 3 * l])\n find_position = AA_position + 1\n return return_DNA\n\n\nDNA = ''\nfilename = input('Enter file name: ')\nfileread = open(filename, 'r')\nfor i in fileread:\n read = i.strip()\n DNA += read.upper()\nprint(DNA[:200])\nF_position = 0\nR_position = 0\nAa_input = input('what is the aa?')\nDNA_F_1 = DNA\nprint1 = Peptide_Encoding(DNA_F_1, Aa_input)\nif print1 != []:\n for i in print1:\n print('1', i)\nDNA_F_2 = DNA[1:]\nprint2 = Peptide_Encoding(DNA_F_2, Aa_input)\nif print2 != []:\n for i in print2:\n print('2', i)\nDNA_F_3 = DNA[2:]\nprint3 = Peptide_Encoding(DNA_F_3, Aa_input)\nif print3 != []:\n for i in print3:\n print('3', i)\nRC_DNA = reversecomplement(DNA)\nDNA_R_1 = RC_DNA\nprint4 = Peptide_Encoding(DNA_R_1, Aa_input)\nif print4 != []:\n for i in print4:\n print('4', reversecomplement(i))\nDNA_R_2 = RC_DNA[1:]\nprint5 = Peptide_Encoding(DNA_R_2, Aa_input)\nif print5 != []:\n for i in print5:\n print('5', reversecomplement(i))\nDNA_R_3 = RC_DNA[2:]\nprint6 = Peptide_Encoding(DNA_R_3, Aa_input)\nif print6 != []:\n for i in print6:\n print('6', reversecomplement(i))\n",
"step-5": "#Peptide Encoding Problem: Find substrings of a genome encoding a given amino acid sequence.\n# Input: A DNA string Text, an amino acid string Peptide, and the array GeneticCode.\n# Output: All substrings of Text encoding Peptide (if any such substrings exist).\ndef reverse_string(seq):\n return seq[::-1]\n\ndef complement(seq):\n#return the complementary sequence string.\n seq=seq.upper()\n basecomplement={\"A\":\"T\",\"C\":\"G\",\"G\":\"C\",\"T\":\"A\",\"N\":\"N\"}\n letters=list(seq)\n letters=[basecomplement[base] for base in letters]\n return ''.join(letters)\n\n\ndef reversecomplement(seq):\n #return the reverse complement of the dna string.\n seq=reverse_string(seq)\n seq=complement(seq)\n return seq\n\ndef DNA_To_AA(seq):\n RNA_AA_dict = {'TCC': 'S', 'TAC': 'Y', 'AGT': 'S', 'ACG': 'T', 'TAA': '*', 'TTA': 'L', 'GTC': 'V', 'CAC': 'H',\n 'CGT': 'R', 'CGG': 'R', 'CTC': 'L', 'AGG': 'R', 'ACA': 'T', 'TCA': 'S', 'CCT': 'P', 'CAG': 'Q',\n 'ACC': 'T', 'TTC': 'F', 'ATC': 'I', 'AAT': 'N', 'ATA': 'I', 'CAT': 'H', 'GGC': 'G', 'GGG': 'G',\n 'GCT': 'A', 'GAT': 'D', 'GCA': 'A', 'GCG': 'A', 'GTA': 'V', 'GAC': 'D', 'CTT': 'L', 'CAA': 'Q',\n 'CCG': 'P', 'AAG': 'K', 'GTT': 'V', 'GGT': 'G', 'TAT': 'Y', 'TGG': 'W', 'AGA': 'R', 'TTT': 'F',\n 'TAG': '*', 'TGC': 'C', 'GGA': 'G', 'CCA': 'P', 'GCC': 'A', 'CGA': 'R', 'AAA': 'K', 'GTG': 'V',\n 'CGC': 'R', 'CTG': 'L', 'TCG': 'S', 'TTG': 'L', 'GAA': 'E', 'GAG': 'E', 'TCT': 'S', 'ATT': 'I',\n 'AAC': 'N', 'ACT': 'T', 'TGT': 'C', 'CTA': 'L', 'ATG': 'M', 'CCC': 'P', 'AGC': 'S', 'TGA': '*'}\n F_position = 0\n R_position = 0\n Aa=\"\"\n for i in range(int(len(seq) / 3)):\n F_position = i*3\n R_position = F_position+3\n RNA_one=seq[F_position:R_position]\n #if RNA_one == \"TAA\" or RNA_one == \"TAG\" or RNA_one == \"TGA\":\n # break\n Aa += RNA_AA_dict[RNA_one]\n\n return Aa\n\ndef Peptide_Encoding(DNA,AA_input):\n AA= DNA_To_AA(DNA)\n print(AA)\n l=len(AA_input)\n return_DNA=[]\n find_position=0\n #print(DNA,AA,l,return_DNA,find_position)\n\n\n while AA_input in AA[find_position:]:\n #print(AA[find_position:])\n AA_position = find_position + AA[find_position:].find(AA_input)\n DNA_position = 3 * AA_position\n #print(AA_position, DNA_position)\n return_DNA.append(DNA[DNA_position:DNA_position + 3 * l])\n find_position = AA_position + 1\n #print(find_position)\n return return_DNA\n\n\n\nDNA=\"\"\nfilename = input(\"Enter file name: \")\nfileread = open(filename, \"r\")\nfor i in fileread:\n read = i.strip()\n DNA+=read.upper()\nprint(DNA[:200])\n\nF_position=0\nR_position=0\n\nAa_input=input(\"what is the aa?\")\n\n\n\nDNA_F_1=DNA\nprint1=Peptide_Encoding(DNA_F_1,Aa_input)\nif print1!=[]:\n for i in print1:\n print(\"1\",i)\n\n\nDNA_F_2=DNA[1:]\nprint2=Peptide_Encoding(DNA_F_2,Aa_input)\nif print2!=[]:\n for i in print2:\n print(\"2\",i)\n\nDNA_F_3=DNA[2:]\nprint3=Peptide_Encoding(DNA_F_3,Aa_input)\nif print3!=[]:\n for i in print3:\n print(\"3\",i)\n\nRC_DNA=reversecomplement(DNA)\n\nDNA_R_1=RC_DNA\nprint4=Peptide_Encoding(DNA_R_1,Aa_input)\nif print4!=[]:\n for i in print4:\n print(\"4\",reversecomplement(i))\n\nDNA_R_2=RC_DNA[1:]\nprint5=Peptide_Encoding(DNA_R_2,Aa_input)\nif print5!=[]:\n for i in print5:\n print(\"5\",reversecomplement(i))\n\nDNA_R_3=RC_DNA[2:]\nprint6=Peptide_Encoding(DNA_R_3,Aa_input)\nif print6!=[]:\n for i in print6:\n print(\"6\",reversecomplement(i))\n\n#print(DNA_F_1,DNA_F_2,DNA_F_3,DNA_R_1,DNA_R_2,DNA_R_3)\n#print(Aa_F_1,Aa_F_2,Aa_F_3,Aa_R_1,Aa_R_2,Aa_R_3)\n\n#根据AA序列在基因组中寻找相关序列\n\n\n\n\n#Bacillus brevis.txt\n#VKLFPWFNQY",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import numpy as np
import sympy as sp
# (index: int, cos: bool)
# 0 1 1 2 2 3 3 4 4 5 5 ...
# {0, cos}, {1, cos}, {1, sen}, {2, cos}, {2, sen}, ...
alternatingRange = lambda m : [{'index': j, 'cos': True if k == 0 else False} for j in range(m + 1) for k in range(2 if j != 0 else 1)]
# data: "dict"
# data = {'x': [x-points], 'y': [y-points]}
def trigLSQ(data):
noPoints = len(data['x']) # N
order = int(noPoints/2) if int(noPoints/2) < noPoints/2 else int(noPoints/2)-1 # m
c = lambda a : np.array([np.cos(a * float(data['x'][i])) for i in range(noPoints)])
s = lambda a : np.array([np.sin(a * float(data['x'][i])) for i in range(noPoints)])
y = np.array([data['y'][i] for i in range(noPoints)])
# matrix * sol = res
matrix = np.array(
[[np.dot(c(i['index']) if i['cos'] else s(i['index']), c(j['index']) if j['cos'] else s(j['index'])) for i in alternatingRange(order)] for j in alternatingRange(order)]
)
res = [[np.dot(y, c(i['index']) if i['cos'] else s(i['index']))] for i in alternatingRange(order)]
sol = np.linalg.solve(matrix, res)
# F is the function approximation
F = 0
for j, i in enumerate(alternatingRange(order)): F += sol[j][0] * sp.sympify(('cos(' if i['cos'] else 'sin(') + str(i['index']) + ' * 2*pi/12 * x)')
return F
# x = 2kpi/N --> k = xN/2pi
|
normal
|
{
"blob_id": "98c2fdf0dfc9a660a3eb9a359aa9ca14d83c60ce",
"index": 4588,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef trigLSQ(data):\n noPoints = len(data['x'])\n order = int(noPoints / 2) if int(noPoints / 2) < noPoints / 2 else int(\n noPoints / 2) - 1\n c = lambda a: np.array([np.cos(a * float(data['x'][i])) for i in range(\n noPoints)])\n s = lambda a: np.array([np.sin(a * float(data['x'][i])) for i in range(\n noPoints)])\n y = np.array([data['y'][i] for i in range(noPoints)])\n matrix = np.array([[np.dot(c(i['index']) if i['cos'] else s(i['index']),\n c(j['index']) if j['cos'] else s(j['index'])) for i in\n alternatingRange(order)] for j in alternatingRange(order)])\n res = [[np.dot(y, c(i['index']) if i['cos'] else s(i['index']))] for i in\n alternatingRange(order)]\n sol = np.linalg.solve(matrix, res)\n F = 0\n for j, i in enumerate(alternatingRange(order)):\n F += sol[j][0] * sp.sympify(('cos(' if i['cos'] else 'sin(') + str(\n i['index']) + ' * 2*pi/12 * x)')\n return F\n",
"step-3": "<mask token>\nalternatingRange = lambda m: [{'index': j, 'cos': True if k == 0 else False\n } for j in range(m + 1) for k in range(2 if j != 0 else 1)]\n\n\ndef trigLSQ(data):\n noPoints = len(data['x'])\n order = int(noPoints / 2) if int(noPoints / 2) < noPoints / 2 else int(\n noPoints / 2) - 1\n c = lambda a: np.array([np.cos(a * float(data['x'][i])) for i in range(\n noPoints)])\n s = lambda a: np.array([np.sin(a * float(data['x'][i])) for i in range(\n noPoints)])\n y = np.array([data['y'][i] for i in range(noPoints)])\n matrix = np.array([[np.dot(c(i['index']) if i['cos'] else s(i['index']),\n c(j['index']) if j['cos'] else s(j['index'])) for i in\n alternatingRange(order)] for j in alternatingRange(order)])\n res = [[np.dot(y, c(i['index']) if i['cos'] else s(i['index']))] for i in\n alternatingRange(order)]\n sol = np.linalg.solve(matrix, res)\n F = 0\n for j, i in enumerate(alternatingRange(order)):\n F += sol[j][0] * sp.sympify(('cos(' if i['cos'] else 'sin(') + str(\n i['index']) + ' * 2*pi/12 * x)')\n return F\n",
"step-4": "import numpy as np\nimport sympy as sp\nalternatingRange = lambda m: [{'index': j, 'cos': True if k == 0 else False\n } for j in range(m + 1) for k in range(2 if j != 0 else 1)]\n\n\ndef trigLSQ(data):\n noPoints = len(data['x'])\n order = int(noPoints / 2) if int(noPoints / 2) < noPoints / 2 else int(\n noPoints / 2) - 1\n c = lambda a: np.array([np.cos(a * float(data['x'][i])) for i in range(\n noPoints)])\n s = lambda a: np.array([np.sin(a * float(data['x'][i])) for i in range(\n noPoints)])\n y = np.array([data['y'][i] for i in range(noPoints)])\n matrix = np.array([[np.dot(c(i['index']) if i['cos'] else s(i['index']),\n c(j['index']) if j['cos'] else s(j['index'])) for i in\n alternatingRange(order)] for j in alternatingRange(order)])\n res = [[np.dot(y, c(i['index']) if i['cos'] else s(i['index']))] for i in\n alternatingRange(order)]\n sol = np.linalg.solve(matrix, res)\n F = 0\n for j, i in enumerate(alternatingRange(order)):\n F += sol[j][0] * sp.sympify(('cos(' if i['cos'] else 'sin(') + str(\n i['index']) + ' * 2*pi/12 * x)')\n return F\n",
"step-5": "import numpy as np\nimport sympy as sp\n\n# (index: int, cos: bool)\n# 0 1 1 2 2 3 3 4 4 5 5 ...\n# {0, cos}, {1, cos}, {1, sen}, {2, cos}, {2, sen}, ...\nalternatingRange = lambda m : [{'index': j, 'cos': True if k == 0 else False} for j in range(m + 1) for k in range(2 if j != 0 else 1)]\n\n# data: \"dict\"\n# data = {'x': [x-points], 'y': [y-points]}\ndef trigLSQ(data):\n noPoints = len(data['x']) # N\n order = int(noPoints/2) if int(noPoints/2) < noPoints/2 else int(noPoints/2)-1 # m\n\n c = lambda a : np.array([np.cos(a * float(data['x'][i])) for i in range(noPoints)])\n s = lambda a : np.array([np.sin(a * float(data['x'][i])) for i in range(noPoints)])\n\n y = np.array([data['y'][i] for i in range(noPoints)])\n\n # matrix * sol = res\n\n matrix = np.array(\n [[np.dot(c(i['index']) if i['cos'] else s(i['index']), c(j['index']) if j['cos'] else s(j['index'])) for i in alternatingRange(order)] for j in alternatingRange(order)]\n )\n res = [[np.dot(y, c(i['index']) if i['cos'] else s(i['index']))] for i in alternatingRange(order)]\n sol = np.linalg.solve(matrix, res)\n\n # F is the function approximation\n F = 0\n for j, i in enumerate(alternatingRange(order)): F += sol[j][0] * sp.sympify(('cos(' if i['cos'] else 'sin(') + str(i['index']) + ' * 2*pi/12 * x)')\n\n return F\n\n# x = 2kpi/N --> k = xN/2pi",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |