code (string, lengths 13–6.09M) | order_type (string, 2 classes) | original_example (dict) | step_ids (list, lengths 1–5)
---|---|---|---|
from django.conf import settings
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib import admin
from .views import home, profile
from member.views import member
from publication.views import publication, pub_detail
from notice.views import list, notice_detail
from research.views import research
from protocol.views import protocol_list
urlpatterns = [
url(r'^$', home, name='home'),
url(r'prof/profile/$', profile, name='profile'),
url(r'members/$', member, name='member'),
url(r'researches/$', research, name='research'),
url(r'pub/(?P<type>[\w-]+)/$', publication, name='publication'),
url(r'pub/detail/(?P<pub_id>\d+)/$', pub_detail, name='pub_detail'),
url(r'notice/list/(?P<type>[\w-]+)/$', list, name='notice_list'),
url(r'notice/detail/(?P<notice_id>\d+)/$', notice_detail, name='notice_detail'),
url(r'protocol/list/$', protocol_list, name="protocol_list"),
url(r'^summernote/', include('django_summernote.urls')),
url(r'^admin/', admin.site.urls),
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
normal
|
{
"blob_id": "a35e86e474883d892a6ce8eb191a3a5f8a9558c8",
"index": 1105,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif settings.DEBUG:\n urlpatterns += static(settings.STATIC_URL, document_root=settings.\n STATIC_ROOT)\n urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT\n )\n",
"step-3": "<mask token>\nurlpatterns = [url('^$', home, name='home'), url('prof/profile/$', profile,\n name='profile'), url('members/$', member, name='member'), url(\n 'researches/$', research, name='research'), url(\n 'pub/(?P<type>[\\\\w-]+)/$', publication, name='publication'), url(\n 'pub/detail/(?P<pub_id>\\\\d+)/$', pub_detail, name='pub_detail'), url(\n 'notice/list/(?P<type>[\\\\w-]+)/$', list, name='notice_list'), url(\n 'notice/detail/(?P<notice_id>\\\\d+)/$', notice_detail, name=\n 'notice_detail'), url('protocol/list/$', protocol_list, name=\n 'protocol_list'), url('^summernote/', include('django_summernote.urls')\n ), url('^admin/', admin.site.urls)]\nif settings.DEBUG:\n urlpatterns += static(settings.STATIC_URL, document_root=settings.\n STATIC_ROOT)\n urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT\n )\n",
"step-4": "from django.conf import settings\nfrom django.conf.urls import url, include\nfrom django.conf.urls.static import static\nfrom django.contrib import admin\nfrom .views import home, profile\nfrom member.views import member\nfrom publication.views import publication, pub_detail\nfrom notice.views import list, notice_detail\nfrom research.views import research\nfrom protocol.views import protocol_list\nurlpatterns = [url('^$', home, name='home'), url('prof/profile/$', profile,\n name='profile'), url('members/$', member, name='member'), url(\n 'researches/$', research, name='research'), url(\n 'pub/(?P<type>[\\\\w-]+)/$', publication, name='publication'), url(\n 'pub/detail/(?P<pub_id>\\\\d+)/$', pub_detail, name='pub_detail'), url(\n 'notice/list/(?P<type>[\\\\w-]+)/$', list, name='notice_list'), url(\n 'notice/detail/(?P<notice_id>\\\\d+)/$', notice_detail, name=\n 'notice_detail'), url('protocol/list/$', protocol_list, name=\n 'protocol_list'), url('^summernote/', include('django_summernote.urls')\n ), url('^admin/', admin.site.urls)]\nif settings.DEBUG:\n urlpatterns += static(settings.STATIC_URL, document_root=settings.\n STATIC_ROOT)\n urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT\n )\n",
"step-5": "from django.conf import settings\nfrom django.conf.urls import url, include\nfrom django.conf.urls.static import static\nfrom django.contrib import admin\n\nfrom .views import home, profile\nfrom member.views import member\nfrom publication.views import publication, pub_detail\nfrom notice.views import list, notice_detail\nfrom research.views import research\nfrom protocol.views import protocol_list\n\nurlpatterns = [\n url(r'^$', home, name='home'),\n url(r'prof/profile/$', profile, name='profile'),\n url(r'members/$', member, name='member'),\n url(r'researches/$', research, name='research'),\n url(r'pub/(?P<type>[\\w-]+)/$', publication, name='publication'),\n url(r'pub/detail/(?P<pub_id>\\d+)/$', pub_detail, name='pub_detail'),\n url(r'notice/list/(?P<type>[\\w-]+)/$', list, name='notice_list'),\n url(r'notice/detail/(?P<notice_id>\\d+)/$', notice_detail, name='notice_detail'),\n url(r'protocol/list/$', protocol_list, name=\"protocol_list\"),\n\n url(r'^summernote/', include('django_summernote.urls')),\n\n url(r'^admin/', admin.site.urls),\n]\n\nif settings.DEBUG:\n urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\n urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
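
Note on the row above: `django.conf.urls.url()` was deprecated in Django 2.0 and removed in Django 4.0. A minimal sketch of the same urlconf in the modern spelling, assuming the same project layout as the row (only a few routes shown; the rest carry over the same way):

```python
# django.urls.re_path is the drop-in regex replacement for url()
from django.urls import include, re_path
from django.contrib import admin

from .views import home, profile  # same view imports as in the row above

urlpatterns = [
    re_path(r'^$', home, name='home'),
    re_path(r'^prof/profile/$', profile, name='profile'),
    re_path(r'^summernote/', include('django_summernote.urls')),
    re_path(r'^admin/', admin.site.urls),
]
```
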
<|reserved_special_token_0|>
def do_main_program():
print('start the main program...')
while True:
time.sleep(1)
print('another second passed')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def do_main_program():
print('start the main program...')
while True:
time.sleep(1)
print('another second passed')
<|reserved_special_token_0|>
with context:
print('start the main program')
do_main_program()
print('end ')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def do_main_program():
print('start the main program...')
while True:
time.sleep(1)
print('another second passed')
context = daemon.DaemonContext()
context.stdout = sys.stdout
context.stderr = sys.stderr
with context:
print('start the main program')
do_main_program()
print('end ')
<|reserved_special_token_1|>
import daemon
import time
import sys
def do_main_program():
print('start the main program...')
while True:
time.sleep(1)
print('another second passed')
context = daemon.DaemonContext()
context.stdout = sys.stdout
context.stderr = sys.stderr
with context:
print('start the main program')
do_main_program()
print('end ')
<|reserved_special_token_1|>
import daemon
import time
import sys
#out = open("~/tmp/stdout", "a+")
#err = open("~/tmp/stderr", "a+")
# If stdout is set to the terminal, closing the terminal window exits the daemon.
# Ctrl+C does not exit the process
# Closing the terminal window exits the daemon
def do_main_program():
print("start the main program...")
while True:
time.sleep(1)
print('another second passed')
context = daemon.DaemonContext()
context.stdout = sys.stdout
context.stderr = sys.stderr
with context:
print("start the main program")
do_main_program()
print("end ")
|
flexible
|
{
"blob_id": "3cb96607aaf58a7de3fa0a9cd61b7f4e3c6b061a",
"index": 4802,
"step-1": "<mask token>\n\n\ndef do_main_program():\n print('start the main program...')\n while True:\n time.sleep(1)\n print('another second passed')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef do_main_program():\n print('start the main program...')\n while True:\n time.sleep(1)\n print('another second passed')\n\n\n<mask token>\nwith context:\n print('start the main program')\n do_main_program()\nprint('end ')\n",
"step-3": "<mask token>\n\n\ndef do_main_program():\n print('start the main program...')\n while True:\n time.sleep(1)\n print('another second passed')\n\n\ncontext = daemon.DaemonContext()\ncontext.stdout = sys.stdout\ncontext.stderr = sys.stderr\nwith context:\n print('start the main program')\n do_main_program()\nprint('end ')\n",
"step-4": "import daemon\nimport time\nimport sys\n\n\ndef do_main_program():\n print('start the main program...')\n while True:\n time.sleep(1)\n print('another second passed')\n\n\ncontext = daemon.DaemonContext()\ncontext.stdout = sys.stdout\ncontext.stderr = sys.stderr\nwith context:\n print('start the main program')\n do_main_program()\nprint('end ')\n",
"step-5": "import daemon\nimport time\nimport sys\n\n#out = open(\"~/tmp/stdout\", \"a+\")\n#err = open(\"~/tmp/stderr\", \"a+\")\n# 如果设定为标准输出,那么关闭终端窗口,退出守护进程。\n# Ctrl+c 不会退出进程\n# 关闭终端窗口,退出守护进程\n\ndef do_main_program():\n print(\"start the main program...\")\n while True:\n time.sleep(1)\n print('another second passed')\n\n\ncontext = daemon.DaemonContext()\n\ncontext.stdout = sys.stdout\ncontext.stderr = sys.stderr\n\n\n\nwith context:\n print(\"start the main program\")\n do_main_program()\n\nprint(\"end \")",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
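
A side note on the commented-out `open("~/tmp/stdout", "a+")` lines in the row above: `open()` does not expand `~`, so they would fail as written. A minimal sketch of file-backed redirection with python-daemon, assuming the log directory already exists (paths are illustrative):

```python
import os

import daemon

# open() does not expand '~'; expanduser() is needed, and ~/tmp must exist.
out = open(os.path.expanduser('~/tmp/stdout'), 'a+')
err = open(os.path.expanduser('~/tmp/stderr'), 'a+')

# With real files instead of the controlling terminal, the daemon keeps
# logging after the terminal window is closed.
context = daemon.DaemonContext(stdout=out, stderr=err)
```
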
#coding: utf-8
from django.conf.urls import patterns, url
import views
urlpatterns = patterns('',
url(r'^douban/books$', views.BookList.as_view()),
)
|
normal
|
{
"blob_id": "93418e554893db4eb888396e8d6f60a8364d9ee3",
"index": 8560,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = patterns('', url('^douban/books$', views.BookList.as_view()))\n",
"step-3": "from django.conf.urls import patterns, url\nimport views\nurlpatterns = patterns('', url('^douban/books$', views.BookList.as_view()))\n",
"step-4": "#coding: utf-8\n\nfrom django.conf.urls import patterns, url\n\nimport views\n\nurlpatterns = patterns('',\n url(r'^douban/books$', views.BookList.as_view()),\n)\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
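
The row above uses `django.conf.urls.patterns()`, which was deprecated in Django 1.8 and removed in 1.10. The same urlconf as a plain list, keeping the row's imports:

```python
# coding: utf-8
from django.conf.urls import url

import views

# urlpatterns is a plain list in Django >= 1.10; patterns('') is gone.
urlpatterns = [
    url(r'^douban/books$', views.BookList.as_view()),
]
```
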
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def validIPAddress(self, IP):
"""
:type IP: str
:rtype: str
"""
def validateIPv4(IP):
digits = IP.split('.')
if len(digits) != 4:
return False
for digitstr in digits:
if len(digitstr) > 3 or len(digitstr) <= 0:
return False
try:
digit = int(digitstr)
except:
return False
if digit > 255 or digit < 0:
return False
if len(str(digit)) != len(digitstr):
return False
return True
def validateIPv6(IP):
hexDigits = IP.split(':')
if len(hexDigits) != 8:
return False
for hexDigitStr in hexDigits:
if len(hexDigitStr) > 4 or len(hexDigitStr) <= 0:
return False
for char in hexDigitStr:
try:
int(char)
except:
if ord(char.lower()) - ord('a') < 0 or ord(char.lower()
) - ord('a') > 5:
return False
return True
if validateIPv4(IP):
return 'IPv4'
elif validateIPv6(IP):
return 'IPv6'
else:
return 'Neither'
<|reserved_special_token_1|>
class Solution:
def validIPAddress(self, IP):
"""
:type IP: str
:rtype: str
"""
def validateIPv4(IP):
digits = IP.split('.')
if len(digits) != 4:
return False
for digitstr in digits:
if len(digitstr) > 3 or len(digitstr) <= 0:
return False
try:
digit = int(digitstr)
except:
return False
# check range
if digit > 255 or digit < 0:
return False
# check leading 0s
if len(str(digit)) != len(digitstr):
return False
return True
def validateIPv6(IP):
hexDigits = IP.split(':')
if len(hexDigits) != 8:
return False
for hexDigitStr in hexDigits:
if len(hexDigitStr) > 4 or len(hexDigitStr) <= 0:
return False
for char in hexDigitStr:
# check hexadecimal digit
try:
int(char)
except:
if ord(char.lower()) - ord('a') < 0 or \
ord(char.lower()) - ord('a') > 5:
return False
return True
if validateIPv4(IP):
return 'IPv4'
elif validateIPv6(IP):
return 'IPv6'
else:
return 'Neither'
# print(Solution().validIPAddress("172.16.254.1"))
# print(Solution().validIPAddress("2001:0db8:85a3:0:0:8A2E:0370:7334"))
# print(Solution().validIPAddress("256.256.256.256"))
# print(Solution().validIPAddress("172.16.254.01"))
# print(Solution().validIPAddress("2001:db8:85a3:0:0:8A2E:0370:7334"))
# print(Solution().validIPAddress("2001:0db8:85a3::8A2E:0370:7334"))
# print(Solution().validIPAddress("10:0df8:85a3:0:0:8a2e:037:7334"))
# print(Solution().validIPAddress("120.25.2.10"))
|
flexible
|
{
"blob_id": "6216a5e45fee8ade5ec9072c42c1b08f3b0f4c65",
"index": 2433,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def validIPAddress(self, IP):\n \"\"\"\n :type IP: str\n :rtype: str\n \"\"\"\n\n def validateIPv4(IP):\n digits = IP.split('.')\n if len(digits) != 4:\n return False\n for digitstr in digits:\n if len(digitstr) > 3 or len(digitstr) <= 0:\n return False\n try:\n digit = int(digitstr)\n except:\n return False\n if digit > 255 or digit < 0:\n return False\n if len(str(digit)) != len(digitstr):\n return False\n return True\n\n def validateIPv6(IP):\n hexDigits = IP.split(':')\n if len(hexDigits) != 8:\n return False\n for hexDigitStr in hexDigits:\n if len(hexDigitStr) > 4 or len(hexDigitStr) <= 0:\n return False\n for char in hexDigitStr:\n try:\n int(char)\n except:\n if ord(char.lower()) - ord('a') < 0 or ord(char.lower()\n ) - ord('a') > 5:\n return False\n return True\n if validateIPv4(IP):\n return 'IPv4'\n elif validateIPv6(IP):\n return 'IPv6'\n else:\n return 'Neither'\n",
"step-4": "class Solution:\n def validIPAddress(self, IP):\n \"\"\"\n :type IP: str\n :rtype: str\n \"\"\"\n \n def validateIPv4(IP):\n digits = IP.split('.')\n if len(digits) != 4:\n return False\n for digitstr in digits:\n if len(digitstr) > 3 or len(digitstr) <= 0:\n return False\n try: \n digit = int(digitstr)\n except: \n return False\n # check range\n if digit > 255 or digit < 0:\n return False\n # check leading 0s\n if len(str(digit)) != len(digitstr):\n return False\n return True\n \n def validateIPv6(IP):\n hexDigits = IP.split(':')\n if len(hexDigits) != 8:\n return False\n for hexDigitStr in hexDigits:\n if len(hexDigitStr) > 4 or len(hexDigitStr) <= 0:\n return False\n\n for char in hexDigitStr:\n # check hexadecimal digit\n try:\n int(char)\n except:\n if ord(char.lower()) - ord('a') < 0 or \\\n ord(char.lower()) - ord('a') > 5:\n return False\n return True\n\n if validateIPv4(IP):\n return 'IPv4'\n elif validateIPv6(IP):\n return 'IPv6'\n else:\n return 'Neither'\n\n# print(Solution().validIPAddress(\"172.16.254.1\"))\n# print(Solution().validIPAddress(\"2001:0db8:85a3:0:0:8A2E:0370:7334\"))\n# print(Solution().validIPAddress(\"256.256.256.256\"))\n# print(Solution().validIPAddress(\"172.16.254.01\"))\n# print(Solution().validIPAddress(\"2001:db8:85a3:0:0:8A2E:0370:7334\"))\n# print(Solution().validIPAddress(\"2001:0db8:85a3::8A2E:0370:7334\"))\n# print(Solution().validIPAddress(\"10:0df8:85a3:0:0:8a2e:037:7334\"))\n# print(Solution().validIPAddress(\"120.25.2.10\"))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
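
For comparison with the hand-rolled validator above, a sketch using the stdlib `ipaddress` module. It is looser than the step-4 code: `ip_address()` accepts compressed IPv6 such as `2001:0db8:85a3::8A2E:0370:7334`, which the validator above rejects, and its handling of leading zeros varies across Python versions, so it is not a drop-in replacement:

```python
import ipaddress


def valid_ip_address(ip: str) -> str:
    # Accepts anything ipaddress can parse; stricter rules (exactly 8 IPv6
    # groups, no '::' compression) would still need manual checks.
    try:
        parsed = ipaddress.ip_address(ip)
    except ValueError:
        return 'Neither'
    return 'IPv4' if parsed.version == 4 else 'IPv6'


print(valid_ip_address('172.16.254.1'))     # IPv4
print(valid_ip_address('256.256.256.256'))  # Neither
```
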
<|reserved_special_token_0|>
class MyManager(BaseManager):
pass
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MyManager(BaseManager):
pass
MyManager.register('LinphoneBase', LinphoneBase)
<|reserved_special_token_0|>
manager.start()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MyManager(BaseManager):
pass
MyManager.register('LinphoneBase', LinphoneBase)
manager = MyManager()
manager.start()
linphoneBase = manager.LinphoneBase()
<|reserved_special_token_1|>
from multiprocess.managers import BaseManager
from linphonebase import LinphoneBase
class MyManager(BaseManager):
pass
MyManager.register('LinphoneBase', LinphoneBase)
manager = MyManager()
manager.start()
linphoneBase = manager.LinphoneBase()
<|reserved_special_token_1|>
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from multiprocess.managers import BaseManager
from linphonebase import LinphoneBase
class MyManager(BaseManager):
pass
MyManager.register('LinphoneBase', LinphoneBase)
manager = MyManager()
manager.start()
linphoneBase = manager.LinphoneBase()
|
flexible
|
{
"blob_id": "3bb25cedc29f9063046329db1c00e7d9e10ce1cc",
"index": 5089,
"step-1": "<mask token>\n\n\nclass MyManager(BaseManager):\n pass\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MyManager(BaseManager):\n pass\n\n\nMyManager.register('LinphoneBase', LinphoneBase)\n<mask token>\nmanager.start()\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass MyManager(BaseManager):\n pass\n\n\nMyManager.register('LinphoneBase', LinphoneBase)\nmanager = MyManager()\nmanager.start()\nlinphoneBase = manager.LinphoneBase()\n",
"step-4": "from multiprocess.managers import BaseManager\nfrom linphonebase import LinphoneBase\n\n\nclass MyManager(BaseManager):\n pass\n\n\nMyManager.register('LinphoneBase', LinphoneBase)\nmanager = MyManager()\nmanager.start()\nlinphoneBase = manager.LinphoneBase()\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: UTF-8 -*-\n\nfrom multiprocess.managers import BaseManager\nfrom linphonebase import LinphoneBase\n\nclass MyManager(BaseManager):\n pass\n\nMyManager.register('LinphoneBase', LinphoneBase)\n\nmanager = MyManager()\nmanager.start()\nlinphoneBase = manager.LinphoneBase()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
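
The row above registers a class on a `BaseManager` subclass so that `manager.LinphoneBase()` returns a proxy living in a separate server process. A self-contained sketch of the same pattern with the stdlib spelling (`multiprocessing` rather than the `multiprocess` fork) and a toy class:

```python
from multiprocessing.managers import BaseManager


class Counter:
    def __init__(self):
        self._n = 0

    def increment(self):
        self._n += 1
        return self._n


class MyManager(BaseManager):
    pass


# register() attaches a constructor to the manager class; calling
# manager.Counter() then returns a proxy whose methods execute in the
# manager's server process.
MyManager.register('Counter', Counter)

if __name__ == '__main__':
    manager = MyManager()
    manager.start()                # spawns the manager server process
    counter = manager.Counter()    # proxy object
    print(counter.increment())     # -> 1, executed server-side
    manager.shutdown()
```
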
import datetime  # to add timestamps on every block in the blockchain
import hashlib  # library that is used to hash the block
import json  # to communicate in json data
# Flask to implement webservices, jsonify to see the json message/response
# request helps us to connect all the nodes of the blockchain together, forming the p2p network
from flask import Flask, jsonify, request
# requests will help us to verify whether all the nodes have the same blockchain or not, via http requests (used in replace_chain)
import requests
from uuid import uuid4
from urllib.parse import urlparse
# Building a Blockchain
class Blockchain:
def __init__(self):
self.chain = [] # our main block chain
        # now we will create the list of transactions which will record all the transactions
self.transactions = []
        # create_block is used to create a block in the blockchain, so it is executed only when the block is mined (meaning it has the winning proof_of_work=proof); proof=0 and previous_hash='0' for the genesis block
self.create_block(proof=0, previous_hash='0')
        # nodes will contain the unique identifiers of the addresses of all nodes in the p2p network
        self.nodes = set()  # we have taken set() instead of list because the addresses are randomly generated by uuid4, so duplicates are avoided
# part1
def create_block(self, proof, previous_hash):
block = { # dictionary of python data structure
'index': len(self.chain)+1,
'timestamp': str(datetime.datetime.now()),
            'proof': proof,  # works like the nonce of a block; stops when we reach at or below the target
'previous_hash': previous_hash,
'transactions': self.transactions}
        self.transactions = []  # this needs to be done because we can't have duplicate lists of transactions in the further blocks, so empty the transactions that have been added to the block
self.chain.append(block)
return block
def get_previous_block(self):
return self.chain[-1]
def proof_of_work(self, previous_proof):
new_proof = 1
check_proof = False
while check_proof is False:
hash_operation = hashlib.sha256(
str(new_proof**2-previous_proof**2).encode()).hexdigest()
if hash_operation[:4] == '0000':
check_proof = True
else:
new_proof += 1
        return new_proof  # it is just a number corresponding to the puzzle the winner solved: the proof whose hash has 4 leading zeroes
    # the hash of a block is created after generating the block; that's why we only use previous_hash here, because it's already created
def hash(self, block):
encoded_block = json.dumps(block, sort_keys=True).encode()
return hashlib.sha256(encoded_block).hexdigest()
def is_chain_valid(self, chain):
        # reference to the first block stored, the genesis block
previous_block = chain[0]
block_index = 1 # required for iteration
while block_index < len(chain):
            block = chain[block_index]  # current block
            # checking whether the reference stored in the property previous_hash matches the hash of the previous block, using the hash function
if block['previous_hash'] != self.hash(previous_block):
return False
previous_proof = previous_block['proof']
proof = block['proof']
            # verifying the proof of the block with the data proof and previous_proof; it is easier than creating the proof
hash_operation = hashlib.sha256(
str(proof**2 - previous_proof**2).encode()).hexdigest()
            # the more zeroes there are, the harder it is to mine the block
if hash_operation[:4] != '0000':
return False
previous_block = block
block_index += 1
return True
    # function used to add the transactions to the list
def add_transaction(self, senders, receiver, amount):
self.transactions.append({
'senders': senders,
'receiver': receiver,
'amount': amount
})
previous_block = self.get_previous_block()
        # +1 because the transactions are added before mining, so the new block's index will be the previous index +1
return previous_block['index']+1
# part-1 ends
    # part-3 --> dealing with the decentralized application and transactions
    # this function allows us to add different nodes to the chain
    def add_node(self, address):  # growing the decentralized application
        # we need to parse the url before adding it
        parsed_url = urlparse(address)
        # .netloc gives us the unique identifier of the node address, stripping the unneeded parts from it
self.nodes.add(parsed_url.netloc)
    # this function helps us to solve the problem of consensus protocols (competing chains)
    def replace_chain(self):
        # this variable helps us to find the length of the longest chain in the network
        max_length = len(self.chain)
        longest_chain = None
        network = self.nodes  # this variable will hold the addresses of all the nodes in the network
for node in network:
            # we know the nodes set holds only the netloc value, so we are going to use that and make a request to that node to check its chain length
            # using the requests library we make a request to that node's address (f'http://{node}/get_chain' --> f'http://127.0.0.1:5000/get_chain')
            response = requests.get(f'http://{node}/get_chain')
            if response.status_code == 200:  # this is the code to check that something was received in the request
length = response.json()['length']
chain = response.json()['chain']
if length > max_length and self.is_chain_valid(chain):
max_length = length
longest_chain = chain
        # this will happen in every node of the network
        if longest_chain:
            # if this chain is shorter than the other, it will be updated
self.chain = longest_chain
return True
        # if this chain is already the longest in the network, return False and make no update
return False
# part-3 ends
# Mining our Blockchain
app = Flask(__name__)
# Creating a Blockchain
# creating the instance of blockchain
blockchain = Blockchain()
# Mining the blockchain
# create a random and unique address for the node on port 5000
# this is the address used as the sender of the whale coin reward when the miner mines a block
node_address = str(uuid4()).replace('-', '')
# part-2
@app.route('/mine_block', methods=['GET'])
def mine_block():
previous_block = blockchain.get_previous_block()
previous_proof = previous_block['proof']
proof = blockchain.proof_of_work(previous_proof)
previous_hash = blockchain.hash(previous_block)
    # miner's prize
    # usually the receiver public address is created when the user generates the wallet, and the mining pool sends the coin after mining the block to the miner address present in the bat file, which is edited after downloading the software
    blockchain.add_transaction(node_address, 'Bhavjot', 1)
    # when create_block is called, all the transactions performed are inserted inside the newly created block; after being appended they are reset to [] to avoid duplicates
block = blockchain.create_block(proof, previous_hash)
    response = {'message': 'Congratulations, you just mined a block! 😈😈😈😈😈🤓🤓🤓',  # the response is json data
'index': block['index'],
'timestamp': block['timestamp'],
'proof': block['proof'],
'previous_hash': block['previous_hash'],
'transactions': block['transactions']}
return jsonify(response), 200
# getting all blocks in chain
@app.route('/get_chain', methods=['GET'])
def get_chain():
response = {
'chain': blockchain.chain,
'length': len(blockchain.chain)
}
return jsonify(response), 200
# custom message
@app.route('/', methods=['GET'])
def custom_message():
response = {
'message': 'Congratulations you are on Whalecoin 🐳🐳🐳🐳🐳🐳'
}
return jsonify(response), 200
# part-2 ends
# creating the transactions
@app.route('/add_transactions', methods=['POST'])
def add_transaction():
    # this will help us to extract the post request made in postman, like req.params.name in express
json = request.get_json()
    # this will help us to check that all the parameters needed to add the transaction are present
transaction_keys = ['sender', 'receiver', 'amount']
if not all(key in json for key in transaction_keys):
return 'Some elements of the transaction are missing', 400
index = blockchain.add_transaction(
json['sender'], json['receiver'], json['amount'])
    # when the block is mined, all the transactions in the list are added to the block
response = {'message': f'This transaction will be added to Block {index}'}
return jsonify(response), 201
@app.route('/connect_node', methods=['POST'])
def connect_node():
    json = request.get_json()  # we will get the request message sent from postman
# {'nodes':['http://127.0.0.1:5000','http://127.0.0.1:5001','http://127.0.0.1:5003',...]} when adding nodes using add_nodes 127.0.0.1:5001 it will be extracted using netloc
nodes = json.get('nodes')
if nodes is None:
return "No node", 400
for node in nodes:
blockchain.add_node(node) # add our nodes to network
response = {'message': 'All the nodes are now connected. The Whalecoin 🐳🐳🐳🐳🐳🐳 Blockchain now contains the following nodes:',
'total_nodes': list(blockchain.nodes)}
return jsonify(response), 201
# Replacing the chain by the longest chain if needed
# this function is present in every node of the blockchain and is always checked so that the node remains updated with the other blockchains, by hitting the replace_chain URL
@app.route('/replace_chain', methods=['GET'])
def replace_chain():
# using the above defined function in class
is_chain_replaced = blockchain.replace_chain()
if is_chain_replaced: # means the current blockchain was the shortest one and it is replaced
response = {'message': 'The nodes had different chains so the chain was replaced by the longest one.',
'new_chain': blockchain.chain}
else: # means the current blockchain was not the shortest one and it is not replaced
response = {'message': 'All good. The chain is the largest one.',
'actual_chain': blockchain.chain}
return jsonify(response), 200
# Running the app
# host='0.0.0.0' specifies that it is available publicly
app.run(host='0.0.0.0', port=5001)
|
normal
|
{
"blob_id": "e85d3660968410b83b14ba610150c0c8cc880119",
"index": 9191,
"step-1": "<mask token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.transactions = []\n self.create_block(proof=0, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'transactions': self.transactions}\n self.transactions = []\n self.chain.append(block)\n return block\n <mask token>\n <mask token>\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n <mask token>\n\n def add_transaction(self, senders, receiver, amount):\n self.transactions.append({'senders': senders, 'receiver': receiver,\n 'amount': amount})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n <mask token>\n\n def replace_chain(self):\n max_length = len(self.chain)\n longest_chain = None\n network = self.nodes\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.transactions = []\n self.create_block(proof=0, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'transactions': self.transactions}\n self.transactions = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_transaction(self, senders, receiver, amount):\n self.transactions.append({'senders': senders, 'receiver': receiver,\n 'amount': amount})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n max_length = len(self.chain)\n longest_chain = None\n network = self.nodes\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<mask token>\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n blockchain.add_transaction(node_address, 'Bhavjot', 1)\n block = blockchain.create_block(proof, previous_hash)\n response = {'message':\n 'Congratulations, you just mined a block! 
😈😈😈😈😈🤓🤓🤓', 'index': block\n ['index'], 'timestamp': block['timestamp'], 'proof': block['proof'],\n 'previous_hash': block['previous_hash'], 'transactions': block[\n 'transactions']}\n return jsonify(response), 200\n\n\[email protected]('/get_chain', methods=['GET'])\ndef get_chain():\n response = {'chain': blockchain.chain, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\n<mask token>\n\n\[email protected]('/add_transactions', methods=['POST'])\ndef add_transaction():\n json = request.get_json()\n transaction_keys = ['sender', 'receiver', 'amount']\n if not all(key in json for key in transaction_keys):\n return 'Some elements of the transaction are missing', 400\n index = blockchain.add_transaction(json['sender'], json['receiver'],\n json['amount'])\n response = {'message': f'This transaction will be added to Block {index}'}\n return jsonify(response), 201\n\n\[email protected]('/connect_node', methods=['POST'])\ndef connect_node():\n json = request.get_json()\n nodes = json.get('nodes')\n if nodes is None:\n return 'No node', 400\n for node in nodes:\n blockchain.add_node(node)\n response = {'message':\n 'All the nodes are now connected. The Whalecoin 🐳🐳🐳🐳🐳🐳 Blockchain now contains the following nodes:'\n , 'total_nodes': list(blockchain.nodes)}\n return jsonify(response), 201\n\n\[email protected]('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.transactions = []\n self.create_block(proof=0, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'transactions': self.transactions}\n self.transactions = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_transaction(self, senders, receiver, amount):\n self.transactions.append({'senders': senders, 'receiver': receiver,\n 'amount': amount})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n max_length = len(self.chain)\n longest_chain = None\n network = self.nodes\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<mask token>\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n blockchain.add_transaction(node_address, 'Bhavjot', 1)\n block = blockchain.create_block(proof, previous_hash)\n response = {'message':\n 'Congratulations, you just mined a block! 
😈😈😈😈😈🤓🤓🤓', 'index': block\n ['index'], 'timestamp': block['timestamp'], 'proof': block['proof'],\n 'previous_hash': block['previous_hash'], 'transactions': block[\n 'transactions']}\n return jsonify(response), 200\n\n\[email protected]('/get_chain', methods=['GET'])\ndef get_chain():\n response = {'chain': blockchain.chain, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\[email protected]('/', methods=['GET'])\ndef custom_message():\n response = {'message': 'Congratulations you are on Whalecoin 🐳🐳🐳🐳🐳🐳'}\n return jsonify(response), 200\n\n\[email protected]('/add_transactions', methods=['POST'])\ndef add_transaction():\n json = request.get_json()\n transaction_keys = ['sender', 'receiver', 'amount']\n if not all(key in json for key in transaction_keys):\n return 'Some elements of the transaction are missing', 400\n index = blockchain.add_transaction(json['sender'], json['receiver'],\n json['amount'])\n response = {'message': f'This transaction will be added to Block {index}'}\n return jsonify(response), 201\n\n\[email protected]('/connect_node', methods=['POST'])\ndef connect_node():\n json = request.get_json()\n nodes = json.get('nodes')\n if nodes is None:\n return 'No node', 400\n for node in nodes:\n blockchain.add_node(node)\n response = {'message':\n 'All the nodes are now connected. The Whalecoin 🐳🐳🐳🐳🐳🐳 Blockchain now contains the following nodes:'\n , 'total_nodes': list(blockchain.nodes)}\n return jsonify(response), 201\n\n\[email protected]('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\napp.run(host='0.0.0.0', port=5001)\n",
"step-4": "import datetime\nimport hashlib\nimport json\nfrom flask import Flask, jsonify, request\nimport requests\nfrom uuid import uuid4\nfrom urllib.parse import urlparse\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.transactions = []\n self.create_block(proof=0, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'transactions': self.transactions}\n self.transactions = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_transaction(self, senders, receiver, amount):\n self.transactions.append({'senders': senders, 'receiver': receiver,\n 'amount': amount})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n max_length = len(self.chain)\n longest_chain = None\n network = self.nodes\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\napp = Flask(__name__)\nblockchain = Blockchain()\nnode_address = str(uuid4()).replace('-', '')\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n blockchain.add_transaction(node_address, 'Bhavjot', 1)\n block = blockchain.create_block(proof, previous_hash)\n response = {'message':\n 'Congratulations, you just mined a block! 
😈😈😈😈😈🤓🤓🤓', 'index': block\n ['index'], 'timestamp': block['timestamp'], 'proof': block['proof'],\n 'previous_hash': block['previous_hash'], 'transactions': block[\n 'transactions']}\n return jsonify(response), 200\n\n\[email protected]('/get_chain', methods=['GET'])\ndef get_chain():\n response = {'chain': blockchain.chain, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\[email protected]('/', methods=['GET'])\ndef custom_message():\n response = {'message': 'Congratulations you are on Whalecoin 🐳🐳🐳🐳🐳🐳'}\n return jsonify(response), 200\n\n\[email protected]('/add_transactions', methods=['POST'])\ndef add_transaction():\n json = request.get_json()\n transaction_keys = ['sender', 'receiver', 'amount']\n if not all(key in json for key in transaction_keys):\n return 'Some elements of the transaction are missing', 400\n index = blockchain.add_transaction(json['sender'], json['receiver'],\n json['amount'])\n response = {'message': f'This transaction will be added to Block {index}'}\n return jsonify(response), 201\n\n\[email protected]('/connect_node', methods=['POST'])\ndef connect_node():\n json = request.get_json()\n nodes = json.get('nodes')\n if nodes is None:\n return 'No node', 400\n for node in nodes:\n blockchain.add_node(node)\n response = {'message':\n 'All the nodes are now connected. The Whalecoin 🐳🐳🐳🐳🐳🐳 Blockchain now contains the following nodes:'\n , 'total_nodes': list(blockchain.nodes)}\n return jsonify(response), 201\n\n\[email protected]('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\napp.run(host='0.0.0.0', port=5001)\n",
"step-5": "import datetime # to add timestamps on every block in blockchain\nimport hashlib # library that is ued to hash the block\nimport json # to communicate in json data\n# Flask to implement webservices jsonify to see the jsop message/response\n# request help us to connect all the nodes of the blockchain together froming the p2p network\nfrom flask import Flask, jsonify, request\n# it will help us to verify that all the blockchain have same blockhain or not http requests (used in replace_cahin)\nimport requests\nfrom uuid import uuid4\nfrom urllib.parse import urlparse\n\n# Building a Blockchain\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = [] # our main block chain\n # now we will create the list of transation which will record the all transactions\n self.transactions = []\n # create_block used to create the block in blockchain so it is executed only when the block is mined(meaning it has winnnig proof_of_work=proof) proof=0 and previous_hash='0' for the genesis block\n self.create_block(proof=0, previous_hash='0')\n # nodes will contains the unique identifier of the address of all nodes in p2p network\n self.nodes = set() # we have taken set() instead of list because we know that address are randomly generated by uuid4 to avoid duplicacy in it\n # part1\n\n def create_block(self, proof, previous_hash):\n block = { # dictionary of python data structure\n 'index': len(self.chain)+1,\n 'timestamp': str(datetime.datetime.now()),\n 'proof': proof, # works like a nounce of block stops when we reach at or below the target\n 'previous_hash': previous_hash,\n 'transactions': self.transactions}\n self.transactions = [] # this need to be done bcoz we cant have duplicates lists of transactions in the further blocks so empty the transation that had been added in the block\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(\n str(new_proof**2-previous_proof**2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof # it is just a no. 
corresponding to the game solved by person is having a hash with trailing 4 zeroe's\n\n # hash of a block is created after generating block thats we have only use previous_hash because its already created\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n # reference of first block stored genesis block\n previous_block = chain[0]\n block_index = 1 # required for iteration\n while block_index < len(chain):\n block = chain[block_index] # cuurent block\n # checking weather the refernce stored in property previus_hash is currently matched or not with the hash of previous block using hash function\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n # verfying the proof of block with the data proof and previous proof it is easy then creating the proof\n hash_operation = hashlib.sha256(\n str(proof**2 - previous_proof**2).encode()).hexdigest()\n # the more is zero's the more is harder to mine the block\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n # functions used to get add the transactions to the lists\n def add_transaction(self, senders, receiver, amount):\n self.transactions.append({\n 'senders': senders,\n 'receiver': receiver,\n 'amount': amount\n })\n previous_block = self.get_previous_block()\n # +1 beacause before mining the transaction are added so new_block index will be +1 then previous\n return previous_block['index']+1\n # part-1 ends\n\n # part-3--> dealing with decentarlized application and transactions\n\n # this function allow us to add different nodes to chain\n\n def add_node(self, address): # generating the decentarlized application\n # we need to parse the url before adding it\n parsed_url = urlparse(address)\n # .netloc gives us the unique identifier of the node address removing the unrequired part from it\n self.nodes.add(parsed_url.netloc)\n\n # this function help us to solve the problem of consensus protocols (competing chain)\n\n def replace_chain(self):\n # this variable help us to find the length of longest chain among different network\n max_length = len(self.chain)\n longest_chain = None\n network = self.nodes # this variable will hold the address of all the nodes in network\n for node in network:\n # we know the nodes array will hold only the netlock value in nodes so we are going to use taht and make a request to that node check its length\n # using the requests library we make a requests to that node address ([f'http://{node}/get_chain'] --> [f'http://127.0.0.5000/get_chain')]\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200: # this ids the vode chaeck something is received in request\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n # this will happen in every node of network\n if longest_chain:\n # if this chain is shorter than otherit will be updated\n self.chain = longest_chain\n return True\n # if this chain is only longest in network than return false and no update\n return False\n # part-3 ends\n# Mining our Blockchain\n\n\napp = Flask(__name__)\n\n# Creating a Blockchain\n# creating the instance of blockchain\nblockchain = Blockchain()\n\n# Mining the blockchain\n# create an random and unique address for the node on 
port 5000\n# this is the address used by to send the whale coin when the miner mines the wahle coin\nnode_address = str(uuid4()).replace('-', '')\n\n# part-2\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n # miners price\n # usually the reciever public address is created when user generate the wallet and mining pool send the coin after mining the block to miner address present in the bat file which is edited after downloading the software\n blockchain.add_transaction(node_address, 'Bhavjot', 1)\n # when created blockchain is called all the transactions performed will be inserted inside the current created block and when appended in transactions it will be again change to [] empty to avoid the duplicacy\n block = blockchain.create_block(proof, previous_hash)\n response = {'message': 'Congratulations, you just mined a block! 😈😈😈😈😈🤓🤓🤓', # response is a json data\n 'index': block['index'],\n 'timestamp': block['timestamp'],\n 'proof': block['proof'],\n 'previous_hash': block['previous_hash'],\n 'transactions': block['transactions']}\n return jsonify(response), 200\n\n# getting all blocks in chain\n\n\[email protected]('/get_chain', methods=['GET'])\ndef get_chain():\n response = {\n 'chain': blockchain.chain,\n 'length': len(blockchain.chain)\n }\n return jsonify(response), 200\n\n# custom message\n\n\[email protected]('/', methods=['GET'])\ndef custom_message():\n response = {\n 'message': 'Congratulations you are on Whalecoin 🐳🐳🐳🐳🐳🐳'\n }\n return jsonify(response), 200\n\n# part-2 ends\n# creating the transactions\n\n\[email protected]('/add_transactions', methods=['POST'])\ndef add_transaction():\n # this will help us to extract te post request made in postman like req.params.name in express\n json = request.get_json()\n # this will hep us to check that all the parameters are present or not for adding the transactions\n transaction_keys = ['sender', 'receiver', 'amount']\n if not all(key in json for key in transaction_keys):\n return 'Some elements of the transaction are missing', 400\n index = blockchain.add_transaction(\n json['sender'], json['receiver'], json['amount'])\n # when the block is mined all the transations in lists is added to block\n response = {'message': f'This transaction will be added to Block {index}'}\n return jsonify(response), 201\n\n\[email protected]('/connect_node', methods=['POST'])\ndef connect_node():\n json = request.get_json() # we will get request message send from postman\n # {'nodes':['http://127.0.0.1:5000','http://127.0.0.1:5001','http://127.0.0.1:5003',...]} when adding nodes using add_nodes 127.0.0.1:5001 it will be extracted using netloc\n nodes = json.get('nodes')\n if nodes is None:\n return \"No node\", 400\n for node in nodes:\n blockchain.add_node(node) # add our nodes to network\n response = {'message': 'All the nodes are now connected. 
The Whalecoin 🐳🐳🐳🐳🐳🐳 Blockchain now contains the following nodes:',\n 'total_nodes': list(blockchain.nodes)}\n return jsonify(response), 201\n\n\n# Replacing the chain by the longest chain if needed\n# this function will present in every node of blockchain and always checked so that the node remain upadatesd with other blockchains by hitiing replace_chain URL\n@ app.route('/replace_chain', methods=['GET'])\ndef replace_chain():\n # using the above defined function in class\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced: # means the current blockchain was the shortest one and it is replaced\n response = {'message': 'The nodes had different chains so the chain was replaced by the longest one.',\n 'new_chain': blockchain.chain}\n else: # means the current blockchain was not the shortest one and it is not replaced\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\n# Running the app\n# host= '0.0.0.0' specifies that it is available publicily\napp.run(host='0.0.0.0', port=5001)\n",
"step-ids": [
6,
15,
17,
19,
20
]
}
|
[
6,
15,
17,
19,
20
] |
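
A smoke-test sketch against the node above, run from a second shell while `app.run(host='0.0.0.0', port=5001)` is serving; the sender/receiver names and the amount are illustrative:

```python
import requests

BASE = 'http://127.0.0.1:5001'  # matches app.run(..., port=5001) above

print(requests.get(f'{BASE}/mine_block').json()['index'])
print(requests.get(f'{BASE}/get_chain').json()['length'])

r = requests.post(f'{BASE}/add_transactions',
                  json={'sender': 'alice', 'receiver': 'bob', 'amount': 5})
print(r.status_code, r.json()['message'])  # 201, '... added to Block N'
```
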
# -*- coding: utf-8 -*-
#############################################################################
#
# Copyright (C) 2019-Antti Kärki.
# Author: Antti Kärki.
#
# You can modify it under the terms of the GNU AFFERO
# GENERAL PUBLIC LICENSE (AGPL v3), Version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE (AGPL v3) for more details.
#
# You should have received a copy of the GNU AFFERO GENERAL PUBLIC LICENSE
# (AGPL v3) along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
from odoo import api, fields, models
from odoo import exceptions
import logging
_logger = logging.getLogger(__name__)
class rocker_connection():
@api.multi
def create_connection(self):
_database_record = self
_datasource = _database_record.name
_driver = _database_record.driver
_odbcdriver = _database_record.odbcdriver
_sid = _database_record.database
_database = _database_record.database
_host = _database_record.host
_port = _database_record.port
_user = _database_record.user
_password = _database_record.password
con = None
_logger.info('Connecting to database: ' + _database)
try:
if _driver == 'postgresql':
try:
import psycopg2
except:
raise exceptions.ValidationError('No Postgres drivers')
con = psycopg2.connect(host=_host, port=_port, database=_database, user=_user, password=_password)
elif _driver == "mysql":
try:
import mysql.connector
except:
raise exceptions.ValidationError('No MySQL drivers')
con = mysql.connector.connect(host=_host, port=_port, database=_database, user=_user,
password=_password)
elif _driver == "mariadb":
try:
import mysql.connector
except:
raise exceptions.ValidationError('No MariaDB drivers')
con = mysql.connector.connect(host=_host, port=_port, database=_database, user=_user,
password=_password)
elif _driver == "oracle":
try:
import cx_Oracle
except:
raise exceptions.ValidationError('No Oracle drivers')
con = cx_Oracle.connect(_user + '/' + _password + '@//' + _host + ':' + _port + '/' + _sid)
elif _driver == "sqlserver":
try:
import pyodbc
except:
raise exceptions.ValidationError('No SQLServer (ODBC) drivers')
_logger.debug(
'DRIVER={' + _odbcdriver + '};SERVER=' + _host + ';DATABASE=' + _database + ';UID=' + _user + ';PWD=' + _password)
con = pyodbc.connect(
'DRIVER={' + _odbcdriver + '};SERVER=' + _host + ';DATABASE=' + _database + ';UID=' + _user + ';PWD=' + _password)
self._sqldriver = 'sqlserver'
elif _driver == "odbc":
try:
import pyodbc
except:
raise exceptions.ValidationError('No ODBC drivers')
_logger.debug(
'DRIVER={' + _odbcdriver + '};SERVER=' + _host + ';DATABASE=' + _database + ';UID=' + _user + ';PWD=' + _password)
con = pyodbc.connect(
'DRIVER={' + _odbcdriver + '};SERVER=' + _host + ';DATABASE=' + _database + ';UID=' + _user + ';PWD=' + _password)
self._sqldriver = 'odbc'
else:
raise exceptions.ValidationError('Driver not supported')
except:
raise exceptions.ValidationError('Database connection failed')
return con
|
normal
|
{
"blob_id": "96131e3d6c67c0ee4ff7f69d4ffedcbf96470f14",
"index": 7069,
"step-1": "<mask token>\n\n\nclass rocker_connection:\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass rocker_connection:\n\n @api.multi\n def create_connection(self):\n _database_record = self\n _datasource = _database_record.name\n _driver = _database_record.driver\n _odbcdriver = _database_record.odbcdriver\n _sid = _database_record.database\n _database = _database_record.database\n _host = _database_record.host\n _port = _database_record.port\n _user = _database_record.user\n _password = _database_record.password\n con = None\n _logger.info('Connecting to database: ' + _database)\n try:\n if _driver == 'postgresql':\n try:\n import psycopg2\n except:\n raise exceptions.ValidationError('No Postgres drivers')\n con = psycopg2.connect(host=_host, port=_port, database=\n _database, user=_user, password=_password)\n elif _driver == 'mysql':\n try:\n import mysql.connector\n except:\n raise exceptions.ValidationError('No MySQL drivers')\n con = mysql.connector.connect(host=_host, port=_port,\n database=_database, user=_user, password=_password)\n elif _driver == 'mariadb':\n try:\n import mysql.connector\n except:\n raise exceptions.ValidationError('No MariaDB drivers')\n con = mysql.connector.connect(host=_host, port=_port,\n database=_database, user=_user, password=_password)\n elif _driver == 'oracle':\n try:\n import cx_Oracle\n except:\n raise exceptions.ValidationError('No Oracle drivers')\n con = cx_Oracle.connect(_user + '/' + _password + '@//' +\n _host + ':' + _port + '/' + _sid)\n elif _driver == 'sqlserver':\n try:\n import pyodbc\n except:\n raise exceptions.ValidationError(\n 'No SQLServer (ODBC) drivers')\n _logger.debug('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n con = pyodbc.connect('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n self._sqldriver = 'sqlserver'\n elif _driver == 'odbc':\n try:\n import pyodbc\n except:\n raise exceptions.ValidationError('No ODBC drivers')\n _logger.debug('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n con = pyodbc.connect('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n self._sqldriver = 'odbc'\n else:\n raise exceptions.ValidationError('Driver not supported')\n except:\n raise exceptions.ValidationError('Database connection failed')\n return con\n",
"step-3": "<mask token>\n_logger = logging.getLogger(__name__)\n\n\nclass rocker_connection:\n\n @api.multi\n def create_connection(self):\n _database_record = self\n _datasource = _database_record.name\n _driver = _database_record.driver\n _odbcdriver = _database_record.odbcdriver\n _sid = _database_record.database\n _database = _database_record.database\n _host = _database_record.host\n _port = _database_record.port\n _user = _database_record.user\n _password = _database_record.password\n con = None\n _logger.info('Connecting to database: ' + _database)\n try:\n if _driver == 'postgresql':\n try:\n import psycopg2\n except:\n raise exceptions.ValidationError('No Postgres drivers')\n con = psycopg2.connect(host=_host, port=_port, database=\n _database, user=_user, password=_password)\n elif _driver == 'mysql':\n try:\n import mysql.connector\n except:\n raise exceptions.ValidationError('No MySQL drivers')\n con = mysql.connector.connect(host=_host, port=_port,\n database=_database, user=_user, password=_password)\n elif _driver == 'mariadb':\n try:\n import mysql.connector\n except:\n raise exceptions.ValidationError('No MariaDB drivers')\n con = mysql.connector.connect(host=_host, port=_port,\n database=_database, user=_user, password=_password)\n elif _driver == 'oracle':\n try:\n import cx_Oracle\n except:\n raise exceptions.ValidationError('No Oracle drivers')\n con = cx_Oracle.connect(_user + '/' + _password + '@//' +\n _host + ':' + _port + '/' + _sid)\n elif _driver == 'sqlserver':\n try:\n import pyodbc\n except:\n raise exceptions.ValidationError(\n 'No SQLServer (ODBC) drivers')\n _logger.debug('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n con = pyodbc.connect('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n self._sqldriver = 'sqlserver'\n elif _driver == 'odbc':\n try:\n import pyodbc\n except:\n raise exceptions.ValidationError('No ODBC drivers')\n _logger.debug('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n con = pyodbc.connect('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n self._sqldriver = 'odbc'\n else:\n raise exceptions.ValidationError('Driver not supported')\n except:\n raise exceptions.ValidationError('Database connection failed')\n return con\n",
"step-4": "from odoo import api, fields, models\nfrom odoo import exceptions\nimport logging\n_logger = logging.getLogger(__name__)\n\n\nclass rocker_connection:\n\n @api.multi\n def create_connection(self):\n _database_record = self\n _datasource = _database_record.name\n _driver = _database_record.driver\n _odbcdriver = _database_record.odbcdriver\n _sid = _database_record.database\n _database = _database_record.database\n _host = _database_record.host\n _port = _database_record.port\n _user = _database_record.user\n _password = _database_record.password\n con = None\n _logger.info('Connecting to database: ' + _database)\n try:\n if _driver == 'postgresql':\n try:\n import psycopg2\n except:\n raise exceptions.ValidationError('No Postgres drivers')\n con = psycopg2.connect(host=_host, port=_port, database=\n _database, user=_user, password=_password)\n elif _driver == 'mysql':\n try:\n import mysql.connector\n except:\n raise exceptions.ValidationError('No MySQL drivers')\n con = mysql.connector.connect(host=_host, port=_port,\n database=_database, user=_user, password=_password)\n elif _driver == 'mariadb':\n try:\n import mysql.connector\n except:\n raise exceptions.ValidationError('No MariaDB drivers')\n con = mysql.connector.connect(host=_host, port=_port,\n database=_database, user=_user, password=_password)\n elif _driver == 'oracle':\n try:\n import cx_Oracle\n except:\n raise exceptions.ValidationError('No Oracle drivers')\n con = cx_Oracle.connect(_user + '/' + _password + '@//' +\n _host + ':' + _port + '/' + _sid)\n elif _driver == 'sqlserver':\n try:\n import pyodbc\n except:\n raise exceptions.ValidationError(\n 'No SQLServer (ODBC) drivers')\n _logger.debug('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n con = pyodbc.connect('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n self._sqldriver = 'sqlserver'\n elif _driver == 'odbc':\n try:\n import pyodbc\n except:\n raise exceptions.ValidationError('No ODBC drivers')\n _logger.debug('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n con = pyodbc.connect('DRIVER={' + _odbcdriver + '};SERVER=' +\n _host + ';DATABASE=' + _database + ';UID=' + _user +\n ';PWD=' + _password)\n self._sqldriver = 'odbc'\n else:\n raise exceptions.ValidationError('Driver not supported')\n except:\n raise exceptions.ValidationError('Database connection failed')\n return con\n",
"step-5": "# -*- coding: utf-8 -*-\r\n#############################################################################\r\n#\r\n# Copyright (C) 2019-Antti Kärki.\r\n# Author: Antti Kärki.\r\n#\r\n# You can modify it under the terms of the GNU AFFERO\r\n# GENERAL PUBLIC LICENSE (AGPL v3), Version 3.\r\n#\r\n# This program is distributed in the hope that it will be useful,\r\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\r\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\r\n# GNU AFFERO GENERAL PUBLIC LICENSE (AGPL v3) for more details.\r\n#\r\n# You should have received a copy of the GNU AFFERO GENERAL PUBLIC LICENSE\r\n# (AGPL v3) along with this program.\r\n# If not, see <http://www.gnu.org/licenses/>.\r\n#\r\n#############################################################################\r\n\r\n\r\nfrom odoo import api, fields, models\r\nfrom odoo import exceptions\r\nimport logging\r\n\r\n_logger = logging.getLogger(__name__)\r\n\r\n\r\nclass rocker_connection():\r\n\r\n @api.multi\r\n def create_connection(self):\r\n\r\n _database_record = self\r\n _datasource = _database_record.name\r\n _driver = _database_record.driver\r\n _odbcdriver = _database_record.odbcdriver\r\n _sid = _database_record.database\r\n _database = _database_record.database\r\n _host = _database_record.host\r\n _port = _database_record.port\r\n _user = _database_record.user\r\n _password = _database_record.password\r\n\r\n con = None\r\n _logger.info('Connecting to database: ' + _database)\r\n\r\n try:\r\n if _driver == 'postgresql':\r\n try:\r\n import psycopg2\r\n except:\r\n raise exceptions.ValidationError('No Postgres drivers')\r\n con = psycopg2.connect(host=_host, port=_port, database=_database, user=_user, password=_password)\r\n elif _driver == \"mysql\":\r\n try:\r\n import mysql.connector\r\n except:\r\n raise exceptions.ValidationError('No MySQL drivers')\r\n con = mysql.connector.connect(host=_host, port=_port, database=_database, user=_user,\r\n password=_password)\r\n elif _driver == \"mariadb\":\r\n try:\r\n import mysql.connector\r\n except:\r\n raise exceptions.ValidationError('No MariaDB drivers')\r\n con = mysql.connector.connect(host=_host, port=_port, database=_database, user=_user,\r\n password=_password)\r\n elif _driver == \"oracle\":\r\n try:\r\n import cx_Oracle\r\n except:\r\n raise exceptions.ValidationError('No Oracle drivers')\r\n con = cx_Oracle.connect(_user + '/' + _password + '@//' + _host + ':' + _port + '/' + _sid)\r\n elif _driver == \"sqlserver\":\r\n try:\r\n import pyodbc\r\n except:\r\n raise exceptions.ValidationError('No SQLServer (ODBC) drivers')\r\n _logger.debug(\r\n 'DRIVER={' + _odbcdriver + '};SERVER=' + _host + ';DATABASE=' + _database + ';UID=' + _user + ';PWD=' + _password)\r\n con = pyodbc.connect(\r\n 'DRIVER={' + _odbcdriver + '};SERVER=' + _host + ';DATABASE=' + _database + ';UID=' + _user + ';PWD=' + _password)\r\n self._sqldriver = 'sqlserver'\r\n elif _driver == \"odbc\":\r\n try:\r\n import pyodbc\r\n except:\r\n raise exceptions.ValidationError('No ODBC drivers')\r\n _logger.debug(\r\n 'DRIVER={' + _odbcdriver + '};SERVER=' + _host + ';DATABASE=' + _database + ';UID=' + _user + ';PWD=' + _password)\r\n con = pyodbc.connect(\r\n 'DRIVER={' + _odbcdriver + '};SERVER=' + _host + ';DATABASE=' + _database + ';UID=' + _user + ';PWD=' + _password)\r\n self._sqldriver = 'odbc'\r\n else:\r\n raise exceptions.ValidationError('Driver not supported')\r\n except:\r\n raise exceptions.ValidationError('Database connection failed')\r\n return 
con\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import os
import math
def get_datas():
filename = None
while True:
filename = input('Please enter filename:')
if not filename.strip():
print('Filename is empty!')
continue
if not os.path.exists(filename):
            print('File does not exist!')
continue
break
try:
with open(filename) as f:
datas = []
            while True:
                header_line = f.readline().strip()
                if header_line:
                    headers = header_line.split('\t')
                    break
for line in f.readlines():
row_datas = {}
if line.strip():
row = line.strip().split('\t')
for k,v in zip(headers, row):
row_datas[k] = v
datas.append(row_datas)
return headers,datas
except Exception as e:
print(e)
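# Note on the expected input (inferred from get_datas above): a plain-text
# file whose first non-empty line is a tab-separated header row, followed by
# one tab-separated record per line, e.g.
#
#     ID<TAB>Last<TAB>First<TAB>DegreeProgram<TAB>GradYear
#     1001<TAB>Smith<TAB>Ada<TAB>BS<TAB>2021
#     1002<TAB>Jones<TAB>Alan<TAB>MS<TAB>2022
#
# The ID, Last, GradYear and DegreeProgram column names are the ones the
# query functions below rely on; the sample rows are illustrative only.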
def display_all(headers, datas):
if not datas:
        print('No data!')
return
max_page = math.ceil(len(datas) / 10)
page = 0
page_num = 10
while True:
for header in headers:
print(header, end='\t')
print()
for row in datas[page * 10 : (page + 1) * 10]:
for k in headers:
print(row[k], end='\t')
print()
command = input('Continue(Enter) or Quit(Q)?')
if command.strip().lower() == 'q':
break
page += 1
if page >= max_page:
break
def query_from_id(headers, datas):
while True:
        ID = input('Please input a student\'s ID:').strip()
if ID:
break
flag = True
for data in datas:
if data['ID'] == ID:
flag = False
for header in headers:
print(header, ':\t', data[header])
if flag:
        print('No data was found!')
def query_from_lastname(headers, datas):
while True:
        name = input('Please input a student\'s name:').strip()
if name:
break
flag = True
for data in datas:
if data['Last'].lower().startswith(name.lower()):
flag = False
for header in headers:
print(header, ':\t', data[header])
if flag:
        print('No data was found!')
def query_from_some_field(headers, datas):
while True:
print('All fields:', headers)
        field_name = input('Please input a student\'s field name:').strip()
if field_name and field_name in headers:
break
while True:
        value = input('Please input a value for that field:').strip().lower()
if value:
break
for header in headers:
print(header, end='\t')
print()
for data in datas:
if data[field_name].lower() == value:
for header in headers:
print(data[header], end='\t')
print()
def display_grad_year(headers, datas):
while True:
        grad_year = input('Please input a student\'s GradYear:').strip()
if grad_year and grad_year.isdigit():
# grad_year = int(grad_year)
break
datas = [d for d in datas if d['GradYear'] == grad_year]
# print(datas)
display_all(headers, datas)
def count_one_year(headers, datas, grad_year):
ret = {}
for data in datas:
if data['GradYear'] == grad_year:
if data['DegreeProgram'] in ret:
ret[data['DegreeProgram']] += 1
else:
ret[data['DegreeProgram']] = 1
# print(ret)
if ret:
totals = sum(ret.values())
for k,v in ret.items():
print(k, ':', v, 'Percent:', v / totals * 100)
else:
        print('No data!')
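# Worked example for count_one_year (illustrative only): with
# ret == {'BS': 3, 'MS': 1} the totals are 4, so it prints
#     BS : 3 Percent: 75.0
#     MS : 1 Percent: 25.0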
def count_from_grad_year(headers, datas):
while True:
        grad_year = input('Please input a student\'s GradYear:').strip()
if grad_year and grad_year.isdigit():
# grad_year = int(grad_year)
break
while True:
on_after = input('Please Select On or After(On or Aft)? :').strip().lower()
if on_after and on_after in ('on', 'aft'):
break
if on_after == 'on':
count_one_year(headers, datas, grad_year)
elif on_after == 'aft':
max_year = 0
for data in datas:
if int(data['GradYear']) > max_year:
max_year = int(data['GradYear'])
        if max_year < int(grad_year):
            print('No data')
        else:
            # include max_year itself, and count each year in the range
            # (the original passed grad_year on every iteration)
            for year in range(int(grad_year), max_year + 1):
                count_one_year(headers, datas, str(year))
def main():
print('init from file ...')
while True:
datas = get_datas()
if datas:
break
headers, studs = datas
commands = {'list':display_all,'qid':query_from_id,
'qlst':query_from_lastname, 'qfd':query_from_some_field,
'qcgy': count_from_grad_year, 'dgy':display_grad_year}
while True:
print()
print('-------------------------------')
        print('List all:(list); Query ID:(Qid); Query Last(Qlst); Query field(Qfd); '
              'Count GradYear(Qcgy); Display GradYear(Dgy); Quit(Q)')
print('-------------------------------')
command = input('Input your command:').lower()
print()
if command == 'q':
break
        if not command or command not in commands:
print('Bad command!')
continue
else:
commands[command](headers, studs)
if __name__ == '__main__':
main()
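# A hypothetical session (illustrative only), assuming 'students.txt' holds
# data in the tab-separated format described above:
#
#     Please enter filename:students.txt
#     Input your command:qid
#     Please input a student's ID:1001
#     ID :     1001
#     Last :   Smith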
|
normal
|
{
"blob_id": "6829f7bcbc1b12500795eec19829ff077502e270",
"index": 3260,
"step-1": "<mask token>\n\n\ndef get_datas():\n filename = None\n while True:\n filename = input('Please enter filename:')\n if not filename.strip():\n print('Filename is empty!')\n continue\n if not os.path.exists(filename):\n print('File is not exists!')\n continue\n break\n try:\n with open(filename) as f:\n datas = []\n while True:\n headers = f.readline().strip().split('\\t')\n if headers:\n break\n for line in f.readlines():\n row_datas = {}\n if line.strip():\n row = line.strip().split('\\t')\n for k, v in zip(headers, row):\n row_datas[k] = v\n datas.append(row_datas)\n return headers, datas\n except Exception as e:\n print(e)\n\n\ndef display_all(headers, datas):\n if not datas:\n print('No datas!')\n return\n max_page = math.ceil(len(datas) / 10)\n page = 0\n page_num = 10\n while True:\n for header in headers:\n print(header, end='\\t')\n print()\n for row in datas[page * 10:(page + 1) * 10]:\n for k in headers:\n print(row[k], end='\\t')\n print()\n command = input('Continue(Enter) or Quit(Q)?')\n if command.strip().lower() == 'q':\n break\n page += 1\n if page >= max_page:\n break\n\n\n<mask token>\n\n\ndef query_from_some_field(headers, datas):\n while True:\n print('All fields:', headers)\n field_name = input(\"Please input a students's field name:\").strip()\n if field_name and field_name in headers:\n break\n while True:\n value = input(\"Please input a students's value:\").strip().lower()\n if value:\n break\n for header in headers:\n print(header, end='\\t')\n print()\n for data in datas:\n if data[field_name].lower() == value:\n for header in headers:\n print(data[header], end='\\t')\n print()\n\n\ndef display_grad_year(headers, datas):\n while True:\n grad_year = input(\"Please input a students's GradYear:\").strip()\n if grad_year and grad_year.isdigit():\n break\n datas = [d for d in datas if d['GradYear'] == grad_year]\n display_all(headers, datas)\n\n\ndef count_one_year(headers, datas, grad_year):\n ret = {}\n for data in datas:\n if data['GradYear'] == grad_year:\n if data['DegreeProgram'] in ret:\n ret[data['DegreeProgram']] += 1\n else:\n ret[data['DegreeProgram']] = 1\n if ret:\n totals = sum(ret.values())\n for k, v in ret.items():\n print(k, ':', v, 'Percent:', v / totals * 100)\n else:\n print('No datas!')\n\n\n<mask token>\n\n\ndef main():\n print('init from file ...')\n while True:\n datas = get_datas()\n if datas:\n break\n headers, studs = datas\n commands = {'list': display_all, 'qid': query_from_id, 'qlst':\n query_from_lastname, 'qfd': query_from_some_field, 'qcgy':\n count_from_grad_year, 'dgy': display_grad_year}\n while True:\n print()\n print('-------------------------------')\n print(\n 'List all:(list); Query ID:(Qid); Query Last(Qlst); Query field(Qfd); Count GradYear(Qcgy); display_grad_year(Dgy); Quit(Q)'\n )\n print('-------------------------------')\n command = input('Input your command:').lower()\n print()\n if command == 'q':\n break\n if not command or command not in commands.keys():\n print('Bad command!')\n continue\n else:\n commands[command](headers, studs)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_datas():\n filename = None\n while True:\n filename = input('Please enter filename:')\n if not filename.strip():\n print('Filename is empty!')\n continue\n if not os.path.exists(filename):\n print('File is not exists!')\n continue\n break\n try:\n with open(filename) as f:\n datas = []\n while True:\n headers = f.readline().strip().split('\\t')\n if headers:\n break\n for line in f.readlines():\n row_datas = {}\n if line.strip():\n row = line.strip().split('\\t')\n for k, v in zip(headers, row):\n row_datas[k] = v\n datas.append(row_datas)\n return headers, datas\n except Exception as e:\n print(e)\n\n\ndef display_all(headers, datas):\n if not datas:\n print('No datas!')\n return\n max_page = math.ceil(len(datas) / 10)\n page = 0\n page_num = 10\n while True:\n for header in headers:\n print(header, end='\\t')\n print()\n for row in datas[page * 10:(page + 1) * 10]:\n for k in headers:\n print(row[k], end='\\t')\n print()\n command = input('Continue(Enter) or Quit(Q)?')\n if command.strip().lower() == 'q':\n break\n page += 1\n if page >= max_page:\n break\n\n\n<mask token>\n\n\ndef query_from_some_field(headers, datas):\n while True:\n print('All fields:', headers)\n field_name = input(\"Please input a students's field name:\").strip()\n if field_name and field_name in headers:\n break\n while True:\n value = input(\"Please input a students's value:\").strip().lower()\n if value:\n break\n for header in headers:\n print(header, end='\\t')\n print()\n for data in datas:\n if data[field_name].lower() == value:\n for header in headers:\n print(data[header], end='\\t')\n print()\n\n\ndef display_grad_year(headers, datas):\n while True:\n grad_year = input(\"Please input a students's GradYear:\").strip()\n if grad_year and grad_year.isdigit():\n break\n datas = [d for d in datas if d['GradYear'] == grad_year]\n display_all(headers, datas)\n\n\ndef count_one_year(headers, datas, grad_year):\n ret = {}\n for data in datas:\n if data['GradYear'] == grad_year:\n if data['DegreeProgram'] in ret:\n ret[data['DegreeProgram']] += 1\n else:\n ret[data['DegreeProgram']] = 1\n if ret:\n totals = sum(ret.values())\n for k, v in ret.items():\n print(k, ':', v, 'Percent:', v / totals * 100)\n else:\n print('No datas!')\n\n\ndef count_from_grad_year(headers, datas):\n while True:\n grad_year = input(\"Please input a students's GradYear:\").strip()\n if grad_year and grad_year.isdigit():\n break\n while True:\n on_after = input('Please Select On or After(On or Aft)? 
:').strip(\n ).lower()\n if on_after and on_after in ('on', 'aft'):\n break\n if on_after == 'on':\n count_one_year(headers, datas, grad_year)\n elif on_after == 'aft':\n max_year = 0\n for data in datas:\n if int(data['GradYear']) > max_year:\n max_year = int(data['GradYear'])\n if max_year < int(grad_year):\n print('No datas')\n else:\n for year in range(int(grad_year), max_year):\n count_one_year(headers, datas, grad_year)\n\n\ndef main():\n print('init from file ...')\n while True:\n datas = get_datas()\n if datas:\n break\n headers, studs = datas\n commands = {'list': display_all, 'qid': query_from_id, 'qlst':\n query_from_lastname, 'qfd': query_from_some_field, 'qcgy':\n count_from_grad_year, 'dgy': display_grad_year}\n while True:\n print()\n print('-------------------------------')\n print(\n 'List all:(list); Query ID:(Qid); Query Last(Qlst); Query field(Qfd); Count GradYear(Qcgy); display_grad_year(Dgy); Quit(Q)'\n )\n print('-------------------------------')\n command = input('Input your command:').lower()\n print()\n if command == 'q':\n break\n if not command or command not in commands.keys():\n print('Bad command!')\n continue\n else:\n commands[command](headers, studs)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_datas():\n filename = None\n while True:\n filename = input('Please enter filename:')\n if not filename.strip():\n print('Filename is empty!')\n continue\n if not os.path.exists(filename):\n print('File is not exists!')\n continue\n break\n try:\n with open(filename) as f:\n datas = []\n while True:\n headers = f.readline().strip().split('\\t')\n if headers:\n break\n for line in f.readlines():\n row_datas = {}\n if line.strip():\n row = line.strip().split('\\t')\n for k, v in zip(headers, row):\n row_datas[k] = v\n datas.append(row_datas)\n return headers, datas\n except Exception as e:\n print(e)\n\n\ndef display_all(headers, datas):\n if not datas:\n print('No datas!')\n return\n max_page = math.ceil(len(datas) / 10)\n page = 0\n page_num = 10\n while True:\n for header in headers:\n print(header, end='\\t')\n print()\n for row in datas[page * 10:(page + 1) * 10]:\n for k in headers:\n print(row[k], end='\\t')\n print()\n command = input('Continue(Enter) or Quit(Q)?')\n if command.strip().lower() == 'q':\n break\n page += 1\n if page >= max_page:\n break\n\n\ndef query_from_id(headers, datas):\n while True:\n ID = input(\"Please input a students's ID:\").strip()\n if ID:\n break\n flag = True\n for data in datas:\n if data['ID'] == ID:\n flag = False\n for header in headers:\n print(header, ':\\t', data[header])\n if flag:\n print('No data was finded!')\n\n\n<mask token>\n\n\ndef query_from_some_field(headers, datas):\n while True:\n print('All fields:', headers)\n field_name = input(\"Please input a students's field name:\").strip()\n if field_name and field_name in headers:\n break\n while True:\n value = input(\"Please input a students's value:\").strip().lower()\n if value:\n break\n for header in headers:\n print(header, end='\\t')\n print()\n for data in datas:\n if data[field_name].lower() == value:\n for header in headers:\n print(data[header], end='\\t')\n print()\n\n\ndef display_grad_year(headers, datas):\n while True:\n grad_year = input(\"Please input a students's GradYear:\").strip()\n if grad_year and grad_year.isdigit():\n break\n datas = [d for d in datas if d['GradYear'] == grad_year]\n display_all(headers, datas)\n\n\ndef count_one_year(headers, datas, grad_year):\n ret = {}\n for data in datas:\n if data['GradYear'] == grad_year:\n if data['DegreeProgram'] in ret:\n ret[data['DegreeProgram']] += 1\n else:\n ret[data['DegreeProgram']] = 1\n if ret:\n totals = sum(ret.values())\n for k, v in ret.items():\n print(k, ':', v, 'Percent:', v / totals * 100)\n else:\n print('No datas!')\n\n\ndef count_from_grad_year(headers, datas):\n while True:\n grad_year = input(\"Please input a students's GradYear:\").strip()\n if grad_year and grad_year.isdigit():\n break\n while True:\n on_after = input('Please Select On or After(On or Aft)? 
:').strip(\n ).lower()\n if on_after and on_after in ('on', 'aft'):\n break\n if on_after == 'on':\n count_one_year(headers, datas, grad_year)\n elif on_after == 'aft':\n max_year = 0\n for data in datas:\n if int(data['GradYear']) > max_year:\n max_year = int(data['GradYear'])\n if max_year < int(grad_year):\n print('No datas')\n else:\n for year in range(int(grad_year), max_year):\n count_one_year(headers, datas, grad_year)\n\n\ndef main():\n print('init from file ...')\n while True:\n datas = get_datas()\n if datas:\n break\n headers, studs = datas\n commands = {'list': display_all, 'qid': query_from_id, 'qlst':\n query_from_lastname, 'qfd': query_from_some_field, 'qcgy':\n count_from_grad_year, 'dgy': display_grad_year}\n while True:\n print()\n print('-------------------------------')\n print(\n 'List all:(list); Query ID:(Qid); Query Last(Qlst); Query field(Qfd); Count GradYear(Qcgy); display_grad_year(Dgy); Quit(Q)'\n )\n print('-------------------------------')\n command = input('Input your command:').lower()\n print()\n if command == 'q':\n break\n if not command or command not in commands.keys():\n print('Bad command!')\n continue\n else:\n commands[command](headers, studs)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef get_datas():\n filename = None\n while True:\n filename = input('Please enter filename:')\n if not filename.strip():\n print('Filename is empty!')\n continue\n if not os.path.exists(filename):\n print('File is not exists!')\n continue\n break\n try:\n with open(filename) as f:\n datas = []\n while True:\n headers = f.readline().strip().split('\\t')\n if headers:\n break\n for line in f.readlines():\n row_datas = {}\n if line.strip():\n row = line.strip().split('\\t')\n for k, v in zip(headers, row):\n row_datas[k] = v\n datas.append(row_datas)\n return headers, datas\n except Exception as e:\n print(e)\n\n\ndef display_all(headers, datas):\n if not datas:\n print('No datas!')\n return\n max_page = math.ceil(len(datas) / 10)\n page = 0\n page_num = 10\n while True:\n for header in headers:\n print(header, end='\\t')\n print()\n for row in datas[page * 10:(page + 1) * 10]:\n for k in headers:\n print(row[k], end='\\t')\n print()\n command = input('Continue(Enter) or Quit(Q)?')\n if command.strip().lower() == 'q':\n break\n page += 1\n if page >= max_page:\n break\n\n\ndef query_from_id(headers, datas):\n while True:\n ID = input(\"Please input a students's ID:\").strip()\n if ID:\n break\n flag = True\n for data in datas:\n if data['ID'] == ID:\n flag = False\n for header in headers:\n print(header, ':\\t', data[header])\n if flag:\n print('No data was finded!')\n\n\ndef query_from_lastname(headers, datas):\n while True:\n name = input(\"Please input a students's name:\").strip()\n if name:\n break\n flag = True\n for data in datas:\n if data['Last'].lower().startswith(name.lower()):\n flag = False\n for header in headers:\n print(header, ':\\t', data[header])\n if flag:\n print('No data was finded!')\n\n\ndef query_from_some_field(headers, datas):\n while True:\n print('All fields:', headers)\n field_name = input(\"Please input a students's field name:\").strip()\n if field_name and field_name in headers:\n break\n while True:\n value = input(\"Please input a students's value:\").strip().lower()\n if value:\n break\n for header in headers:\n print(header, end='\\t')\n print()\n for data in datas:\n if data[field_name].lower() == value:\n for header in headers:\n print(data[header], end='\\t')\n print()\n\n\ndef display_grad_year(headers, datas):\n while True:\n grad_year = input(\"Please input a students's GradYear:\").strip()\n if grad_year and grad_year.isdigit():\n break\n datas = [d for d in datas if d['GradYear'] == grad_year]\n display_all(headers, datas)\n\n\ndef count_one_year(headers, datas, grad_year):\n ret = {}\n for data in datas:\n if data['GradYear'] == grad_year:\n if data['DegreeProgram'] in ret:\n ret[data['DegreeProgram']] += 1\n else:\n ret[data['DegreeProgram']] = 1\n if ret:\n totals = sum(ret.values())\n for k, v in ret.items():\n print(k, ':', v, 'Percent:', v / totals * 100)\n else:\n print('No datas!')\n\n\ndef count_from_grad_year(headers, datas):\n while True:\n grad_year = input(\"Please input a students's GradYear:\").strip()\n if grad_year and grad_year.isdigit():\n break\n while True:\n on_after = input('Please Select On or After(On or Aft)? 
:').strip(\n ).lower()\n if on_after and on_after in ('on', 'aft'):\n break\n if on_after == 'on':\n count_one_year(headers, datas, grad_year)\n elif on_after == 'aft':\n max_year = 0\n for data in datas:\n if int(data['GradYear']) > max_year:\n max_year = int(data['GradYear'])\n if max_year < int(grad_year):\n print('No datas')\n else:\n for year in range(int(grad_year), max_year):\n count_one_year(headers, datas, grad_year)\n\n\ndef main():\n print('init from file ...')\n while True:\n datas = get_datas()\n if datas:\n break\n headers, studs = datas\n commands = {'list': display_all, 'qid': query_from_id, 'qlst':\n query_from_lastname, 'qfd': query_from_some_field, 'qcgy':\n count_from_grad_year, 'dgy': display_grad_year}\n while True:\n print()\n print('-------------------------------')\n print(\n 'List all:(list); Query ID:(Qid); Query Last(Qlst); Query field(Qfd); Count GradYear(Qcgy); display_grad_year(Dgy); Quit(Q)'\n )\n print('-------------------------------')\n command = input('Input your command:').lower()\n print()\n if command == 'q':\n break\n if not command or command not in commands.keys():\n print('Bad command!')\n continue\n else:\n commands[command](headers, studs)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import os\nimport math\n\ndef get_datas():\n filename = None\n while True:\n filename = input('Please enter filename:')\n if not filename.strip():\n print('Filename is empty!')\n continue\n if not os.path.exists(filename):\n print('File is not exists!')\n continue\n break\n try:\n with open(filename) as f:\n datas = []\n while True:\n headers = f.readline().strip().split('\\t')\n if headers:\n break\n for line in f.readlines():\n row_datas = {}\n if line.strip():\n row = line.strip().split('\\t')\n for k,v in zip(headers, row):\n row_datas[k] = v\n datas.append(row_datas)\n return headers,datas\n except Exception as e:\n print(e)\n\ndef display_all(headers, datas):\n if not datas:\n print('No datas!')\n return\n max_page = math.ceil(len(datas) / 10)\n page = 0\n page_num = 10\n while True:\n for header in headers:\n print(header, end='\\t')\n print()\n for row in datas[page * 10 : (page + 1) * 10]:\n for k in headers:\n print(row[k], end='\\t')\n print()\n command = input('Continue(Enter) or Quit(Q)?')\n if command.strip().lower() == 'q':\n break\n page += 1\n if page >= max_page:\n break\n\n\n\ndef query_from_id(headers, datas):\n while True:\n ID = input('Please input a students\\'s ID:').strip()\n if ID:\n break\n flag = True\n for data in datas:\n if data['ID'] == ID:\n flag = False\n for header in headers:\n print(header, ':\\t', data[header])\n if flag:\n print('No data was finded!')\n\ndef query_from_lastname(headers, datas):\n while True:\n name = input('Please input a students\\'s name:').strip()\n if name:\n break\n flag = True\n for data in datas:\n if data['Last'].lower().startswith(name.lower()):\n flag = False\n for header in headers:\n print(header, ':\\t', data[header])\n if flag:\n print('No data was finded!')\n\ndef query_from_some_field(headers, datas):\n while True:\n print('All fields:', headers)\n field_name = input('Please input a students\\'s field name:').strip()\n if field_name and field_name in headers:\n break\n while True:\n value = input('Please input a students\\'s value:').strip().lower()\n if value:\n break\n for header in headers:\n print(header, end='\\t')\n print()\n for data in datas:\n if data[field_name].lower() == value:\n for header in headers:\n print(data[header], end='\\t')\n print()\n\ndef display_grad_year(headers, datas):\n while True:\n grad_year = input('Please input a students\\'s GradYear:').strip()\n if grad_year and grad_year.isdigit():\n # grad_year = int(grad_year)\n break\n datas = [d for d in datas if d['GradYear'] == grad_year]\n # print(datas)\n display_all(headers, datas)\n\ndef count_one_year(headers, datas, grad_year):\n ret = {}\n for data in datas:\n if data['GradYear'] == grad_year:\n if data['DegreeProgram'] in ret:\n ret[data['DegreeProgram']] += 1\n else:\n ret[data['DegreeProgram']] = 1\n # print(ret)\n if ret:\n totals = sum(ret.values())\n for k,v in ret.items():\n print(k, ':', v, 'Percent:', v / totals * 100)\n else:\n print('No datas!')\n\ndef count_from_grad_year(headers, datas):\n while True:\n grad_year = input('Please input a students\\'s GradYear:').strip()\n if grad_year and grad_year.isdigit():\n # grad_year = int(grad_year)\n break\n while True:\n on_after = input('Please Select On or After(On or Aft)? 
:').strip().lower()\n if on_after and on_after in ('on', 'aft'):\n break\n if on_after == 'on':\n count_one_year(headers, datas, grad_year)\n elif on_after == 'aft':\n max_year = 0\n for data in datas:\n if int(data['GradYear']) > max_year:\n max_year = int(data['GradYear'])\n if max_year < int(grad_year):\n print('No datas')\n else:\n for year in range(int(grad_year), max_year):\n count_one_year(headers, datas, grad_year)\n\n\ndef main():\n print('init from file ...')\n while True:\n datas = get_datas()\n if datas:\n break\n headers, studs = datas\n commands = {'list':display_all,'qid':query_from_id,\n 'qlst':query_from_lastname, 'qfd':query_from_some_field,\n 'qcgy': count_from_grad_year, 'dgy':display_grad_year}\n while True:\n print()\n print('-------------------------------')\n print('List all:(list); Query ID:(Qid); Query Last(Qlst); Query field(Qfd);\\\n Count GradYear(Qcgy); display_grad_year(Dgy); Quit(Q)')\n print('-------------------------------')\n command = input('Input your command:').lower()\n print()\n if command == 'q':\n break\n if not command or command not in commands.keys():\n print('Bad command!')\n continue\n else:\n commands[command](headers, studs)\n \n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
6,
7,
8,
10,
12
]
}
|
[
6,
7,
8,
10,
12
] |
<|reserved_special_token_0|>
class BulletSpawnerTemplate(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def setRounds(self, rounds):
self._rounds = rounds
<|reserved_special_token_0|>
def setInBetweenTimer(self, delay):
self._inBetweenTimer = delay
<|reserved_special_token_0|>
def addBulletTemplate(self, bulletTemplate):
self._bulletTemplate = bulletTemplate
def addMovementCommand(self, cycle, movementCommand):
self._movementList[cycle] = movementCommand
<|reserved_special_token_0|>
class BulletMasterTemplate(object):
def __init__(self, name):
self._name = name
self._bulletSpawnerTemplates = []
self._powerUpTable = {'life': 0, 'power': 0, 'spell': 0, 'points': 0}
def addBulletSpawnerTemplates(self, bulletSpawnerTemplate):
self._bulletSpawnerTemplates.append(bulletSpawnerTemplate)
class Bullet(MovementCommander):
def __init__(self, bulletTemplate, position, exitAngle, master,
spawningCycle):
temp = copy.deepcopy(bulletTemplate._initialVelocity)
temp._angle = temp._angle + exitAngle
super().__init__(position, temp, spawningCycle)
self.addStartingParameters(position, temp)
self._animationName = bulletTemplate._animationName
for i in bulletTemplate._movementList:
self.addMovementCommandDirect(i, bulletTemplate._movementList[i])
self.calculatePositions(master, master._playerPosition, [-100, -100,
1620, 1180], None)
class BulletSpawner(MovementCommander):
def __init__(self, bulletSpawnerTemplate, masterPosition, master, enemy,
spawningCycle):
self._internalCounter = 0
self._exitLocations = []
self._displacement = 0.0
self._master = master
self._displacement = bulletSpawnerTemplate._displacement
for i in bulletSpawnerTemplate._exitLocations:
self._exitLocations.append(i)
self._rotationSpeed = bulletSpawnerTemplate._rotationSpeed
self._bulletTemplate = bulletSpawnerTemplate._bulletTemplate
self._spawningCycle = enemy._spawningCycle
self._seenCycle = enemy._spawningCycle
self._deathCycle = enemy._deathCycle
self._sprayTimer = bulletSpawnerTemplate._sprayTimer
self._initialDelay = bulletSpawnerTemplate._initialDelay
try:
self._lengthOfSpray = max(self._sprayTimer)
except ValueError:
self._lengthOfSpray = 0
self._inBetweenTimer = bulletSpawnerTemplate._inBetweenTimer
self._rounds = bulletSpawnerTemplate._rounds
super().__init__(bulletSpawnerTemplate._initialPosition,
bulletSpawnerTemplate._initialVelocity, spawningCycle)
self.calculatePositions(master, master._playerPosition, None,
masterPosition)
self._maskName = bulletSpawnerTemplate._maskName
self._maskLayer = bulletSpawnerTemplate._maskLayer
def calculateBullets(self):
returnList = []
mode = 'initialDelayMode'
switchCounter = -1
currentRound = 0
for i in self._positionList:
self._internalCounter = self._internalCounter + 1
switchCounter = switchCounter + 1
if mode == 'initialDelayMode':
if switchCounter >= self._initialDelay:
mode = 'sprayMode'
switchCounter = -1
self._seenCycle = (self._spawningCycle + self.
_internalCounter)
elif mode == 'sprayMode':
if switchCounter in self._sprayTimer:
for j in self._exitLocations:
offset = CUS_Polar(self._displacement, j)
pos = CUS_Point(0.0, 0.0)
pos.add(toPoint(offset))
pos._x = pos._x + i._x
pos._y = pos._y + i._y
bullet = Bullet(self._bulletTemplate, pos, j, self.
_master, self._spawningCycle + self.
_internalCounter)
returnList.append(bullet)
if switchCounter >= self._lengthOfSpray:
mode = 'inBetweenTimerMode'
currentRound = currentRound + 1
switchCounter = -1
elif mode == 'inBetweenTimerMode':
if switchCounter >= self._inBetweenTimer:
mode = 'sprayMode'
switchCounter = -1
            if currentRound >= self._rounds and self._rounds != -1:
mode = 'sprayOverMode'
self._deathCycle = (self._spawningCycle + self.
_internalCounter)
return returnList
class BulletMaster(object):
def __init__(self, bulletMasterTemplate, masterPositionList, master,
enemy, spawningCycle):
self._name = bulletMasterTemplate._name
self._bulletSpawners = []
for i in bulletMasterTemplate._bulletSpawnerTemplates:
self._bulletSpawners.append(BulletSpawner(i, masterPositionList,
master, enemy, spawningCycle))
def calculateBullets(self):
returnList = []
for i in self._bulletSpawners:
returnList.extend(i.calculateBullets())
return returnList
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BulletSpawnerTemplate(object):
def __init__(self, initialPosition, initialVelocity):
self._spawningCycle = 0
self._initialPosition = initialPosition
self._initialVelocity = initialVelocity
self._movementList = dict()
self._displacement = 0
self._exitLocations = []
self._rotationSpeed = 0
self._initialDelay = 0
self._sprayTimer = []
self._inBetweenTimer = 0
self._rounds = -1
self._bulletTemplate = None
self._maskName = ''
self._maskLayer = 0
<|reserved_special_token_0|>
def setRounds(self, rounds):
self._rounds = rounds
def setInitialDelay(self, initialDelay):
self._initialDelay = initialDelay
def setInBetweenTimer(self, delay):
self._inBetweenTimer = delay
<|reserved_special_token_0|>
def addBulletTemplate(self, bulletTemplate):
self._bulletTemplate = bulletTemplate
def addMovementCommand(self, cycle, movementCommand):
self._movementList[cycle] = movementCommand
def addMask(self, maskName, maskLayer):
self._maskName = maskName
self._maskLayer = maskLayer
class BulletMasterTemplate(object):
def __init__(self, name):
self._name = name
self._bulletSpawnerTemplates = []
self._powerUpTable = {'life': 0, 'power': 0, 'spell': 0, 'points': 0}
def addBulletSpawnerTemplates(self, bulletSpawnerTemplate):
self._bulletSpawnerTemplates.append(bulletSpawnerTemplate)
class Bullet(MovementCommander):
def __init__(self, bulletTemplate, position, exitAngle, master,
spawningCycle):
temp = copy.deepcopy(bulletTemplate._initialVelocity)
temp._angle = temp._angle + exitAngle
super().__init__(position, temp, spawningCycle)
self.addStartingParameters(position, temp)
self._animationName = bulletTemplate._animationName
for i in bulletTemplate._movementList:
self.addMovementCommandDirect(i, bulletTemplate._movementList[i])
self.calculatePositions(master, master._playerPosition, [-100, -100,
1620, 1180], None)
class BulletSpawner(MovementCommander):
def __init__(self, bulletSpawnerTemplate, masterPosition, master, enemy,
spawningCycle):
self._internalCounter = 0
self._exitLocations = []
self._displacement = 0.0
self._master = master
self._displacement = bulletSpawnerTemplate._displacement
for i in bulletSpawnerTemplate._exitLocations:
self._exitLocations.append(i)
self._rotationSpeed = bulletSpawnerTemplate._rotationSpeed
self._bulletTemplate = bulletSpawnerTemplate._bulletTemplate
self._spawningCycle = enemy._spawningCycle
self._seenCycle = enemy._spawningCycle
self._deathCycle = enemy._deathCycle
self._sprayTimer = bulletSpawnerTemplate._sprayTimer
self._initialDelay = bulletSpawnerTemplate._initialDelay
try:
self._lengthOfSpray = max(self._sprayTimer)
except ValueError:
self._lengthOfSpray = 0
self._inBetweenTimer = bulletSpawnerTemplate._inBetweenTimer
self._rounds = bulletSpawnerTemplate._rounds
super().__init__(bulletSpawnerTemplate._initialPosition,
bulletSpawnerTemplate._initialVelocity, spawningCycle)
self.calculatePositions(master, master._playerPosition, None,
masterPosition)
self._maskName = bulletSpawnerTemplate._maskName
self._maskLayer = bulletSpawnerTemplate._maskLayer
def calculateBullets(self):
returnList = []
mode = 'initialDelayMode'
switchCounter = -1
currentRound = 0
for i in self._positionList:
self._internalCounter = self._internalCounter + 1
switchCounter = switchCounter + 1
if mode == 'initialDelayMode':
if switchCounter >= self._initialDelay:
mode = 'sprayMode'
switchCounter = -1
self._seenCycle = (self._spawningCycle + self.
_internalCounter)
elif mode == 'sprayMode':
if switchCounter in self._sprayTimer:
for j in self._exitLocations:
offset = CUS_Polar(self._displacement, j)
pos = CUS_Point(0.0, 0.0)
pos.add(toPoint(offset))
pos._x = pos._x + i._x
pos._y = pos._y + i._y
bullet = Bullet(self._bulletTemplate, pos, j, self.
_master, self._spawningCycle + self.
_internalCounter)
returnList.append(bullet)
if switchCounter >= self._lengthOfSpray:
mode = 'inBetweenTimerMode'
currentRound = currentRound + 1
switchCounter = -1
elif mode == 'inBetweenTimerMode':
if switchCounter >= self._inBetweenTimer:
mode = 'sprayMode'
switchCounter = -1
            if currentRound >= self._rounds and self._rounds != -1:
mode = 'sprayOverMode'
self._deathCycle = (self._spawningCycle + self.
_internalCounter)
return returnList
class BulletMaster(object):
def __init__(self, bulletMasterTemplate, masterPositionList, master,
enemy, spawningCycle):
self._name = bulletMasterTemplate._name
self._bulletSpawners = []
for i in bulletMasterTemplate._bulletSpawnerTemplates:
self._bulletSpawners.append(BulletSpawner(i, masterPositionList,
master, enemy, spawningCycle))
def calculateBullets(self):
returnList = []
for i in self._bulletSpawners:
returnList.extend(i.calculateBullets())
return returnList
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BulletTemplate(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class BulletSpawnerTemplate(object):
def __init__(self, initialPosition, initialVelocity):
self._spawningCycle = 0
self._initialPosition = initialPosition
self._initialVelocity = initialVelocity
self._movementList = dict()
self._displacement = 0
self._exitLocations = []
self._rotationSpeed = 0
self._initialDelay = 0
self._sprayTimer = []
self._inBetweenTimer = 0
self._rounds = -1
self._bulletTemplate = None
self._maskName = ''
self._maskLayer = 0
def addSprayTimer(self, sprayTimer):
self._sprayTimer.extend(sprayTimer)
def setRounds(self, rounds):
self._rounds = rounds
def setInitialDelay(self, initialDelay):
self._initialDelay = initialDelay
def setInBetweenTimer(self, delay):
self._inBetweenTimer = delay
def addExitLocation(self, location):
self._exitLocations.append(location)
def addBulletTemplate(self, bulletTemplate):
self._bulletTemplate = bulletTemplate
def addMovementCommand(self, cycle, movementCommand):
self._movementList[cycle] = movementCommand
def addMask(self, maskName, maskLayer):
self._maskName = maskName
self._maskLayer = maskLayer
class BulletMasterTemplate(object):
def __init__(self, name):
self._name = name
self._bulletSpawnerTemplates = []
self._powerUpTable = {'life': 0, 'power': 0, 'spell': 0, 'points': 0}
def addBulletSpawnerTemplates(self, bulletSpawnerTemplate):
self._bulletSpawnerTemplates.append(bulletSpawnerTemplate)
class Bullet(MovementCommander):
def __init__(self, bulletTemplate, position, exitAngle, master,
spawningCycle):
temp = copy.deepcopy(bulletTemplate._initialVelocity)
temp._angle = temp._angle + exitAngle
super().__init__(position, temp, spawningCycle)
self.addStartingParameters(position, temp)
self._animationName = bulletTemplate._animationName
for i in bulletTemplate._movementList:
self.addMovementCommandDirect(i, bulletTemplate._movementList[i])
self.calculatePositions(master, master._playerPosition, [-100, -100,
1620, 1180], None)
class BulletSpawner(MovementCommander):
def __init__(self, bulletSpawnerTemplate, masterPosition, master, enemy,
spawningCycle):
self._internalCounter = 0
self._exitLocations = []
self._displacement = 0.0
self._master = master
self._displacement = bulletSpawnerTemplate._displacement
for i in bulletSpawnerTemplate._exitLocations:
self._exitLocations.append(i)
self._rotationSpeed = bulletSpawnerTemplate._rotationSpeed
self._bulletTemplate = bulletSpawnerTemplate._bulletTemplate
self._spawningCycle = enemy._spawningCycle
self._seenCycle = enemy._spawningCycle
self._deathCycle = enemy._deathCycle
self._sprayTimer = bulletSpawnerTemplate._sprayTimer
self._initialDelay = bulletSpawnerTemplate._initialDelay
try:
self._lengthOfSpray = max(self._sprayTimer)
except ValueError:
self._lengthOfSpray = 0
self._inBetweenTimer = bulletSpawnerTemplate._inBetweenTimer
self._rounds = bulletSpawnerTemplate._rounds
super().__init__(bulletSpawnerTemplate._initialPosition,
bulletSpawnerTemplate._initialVelocity, spawningCycle)
self.calculatePositions(master, master._playerPosition, None,
masterPosition)
self._maskName = bulletSpawnerTemplate._maskName
self._maskLayer = bulletSpawnerTemplate._maskLayer
def calculateBullets(self):
returnList = []
mode = 'initialDelayMode'
switchCounter = -1
currentRound = 0
for i in self._positionList:
self._internalCounter = self._internalCounter + 1
switchCounter = switchCounter + 1
if mode == 'initialDelayMode':
if switchCounter >= self._initialDelay:
mode = 'sprayMode'
switchCounter = -1
self._seenCycle = (self._spawningCycle + self.
_internalCounter)
elif mode == 'sprayMode':
if switchCounter in self._sprayTimer:
for j in self._exitLocations:
offset = CUS_Polar(self._displacement, j)
pos = CUS_Point(0.0, 0.0)
pos.add(toPoint(offset))
pos._x = pos._x + i._x
pos._y = pos._y + i._y
bullet = Bullet(self._bulletTemplate, pos, j, self.
_master, self._spawningCycle + self.
_internalCounter)
returnList.append(bullet)
if switchCounter >= self._lengthOfSpray:
mode = 'inBetweenTimerMode'
currentRound = currentRound + 1
switchCounter = -1
elif mode == 'inBetweenTimerMode':
if switchCounter >= self._inBetweenTimer:
mode = 'sprayMode'
switchCounter = -1
            if currentRound >= self._rounds and self._rounds != -1:
mode = 'sprayOverMode'
self._deathCycle = (self._spawningCycle + self.
_internalCounter)
return returnList
class BulletMaster(object):
def __init__(self, bulletMasterTemplate, masterPositionList, master,
enemy, spawningCycle):
self._name = bulletMasterTemplate._name
self._bulletSpawners = []
for i in bulletMasterTemplate._bulletSpawnerTemplates:
self._bulletSpawners.append(BulletSpawner(i, masterPositionList,
master, enemy, spawningCycle))
def calculateBullets(self):
returnList = []
for i in self._bulletSpawners:
returnList.extend(i.calculateBullets())
return returnList
<|reserved_special_token_1|>
from mg_cus_struct import *
from mg_movement import *
import copy
class BulletTemplate(object):
def __init__(self, animationName, initialVelocity, hitbox):
self._spawningCycle = 0
self._animationName = animationName
self._initialVelocity = initialVelocity
self._movementList = dict()
self._hitbox = hitbox
def addMovementCommand(self, cycle, movementCommand):
self._movementList[cycle] = movementCommand
class BulletSpawnerTemplate(object):
def __init__(self, initialPosition, initialVelocity):
self._spawningCycle = 0
self._initialPosition = initialPosition
self._initialVelocity = initialVelocity
self._movementList = dict()
self._displacement = 0
self._exitLocations = []
self._rotationSpeed = 0
self._initialDelay = 0
self._sprayTimer = []
self._inBetweenTimer = 0
self._rounds = -1
self._bulletTemplate = None
self._maskName = ''
self._maskLayer = 0
def addSprayTimer(self, sprayTimer):
self._sprayTimer.extend(sprayTimer)
def setRounds(self, rounds):
self._rounds = rounds
def setInitialDelay(self, initialDelay):
self._initialDelay = initialDelay
def setInBetweenTimer(self, delay):
self._inBetweenTimer = delay
def addExitLocation(self, location):
self._exitLocations.append(location)
def addBulletTemplate(self, bulletTemplate):
self._bulletTemplate = bulletTemplate
def addMovementCommand(self, cycle, movementCommand):
self._movementList[cycle] = movementCommand
def addMask(self, maskName, maskLayer):
self._maskName = maskName
self._maskLayer = maskLayer
class BulletMasterTemplate(object):
def __init__(self, name):
self._name = name
self._bulletSpawnerTemplates = []
self._powerUpTable = {'life': 0, 'power': 0, 'spell': 0, 'points': 0}
def addBulletSpawnerTemplates(self, bulletSpawnerTemplate):
self._bulletSpawnerTemplates.append(bulletSpawnerTemplate)
class Bullet(MovementCommander):
def __init__(self, bulletTemplate, position, exitAngle, master,
spawningCycle):
temp = copy.deepcopy(bulletTemplate._initialVelocity)
temp._angle = temp._angle + exitAngle
super().__init__(position, temp, spawningCycle)
self.addStartingParameters(position, temp)
self._animationName = bulletTemplate._animationName
for i in bulletTemplate._movementList:
self.addMovementCommandDirect(i, bulletTemplate._movementList[i])
self.calculatePositions(master, master._playerPosition, [-100, -100,
1620, 1180], None)
class BulletSpawner(MovementCommander):
def __init__(self, bulletSpawnerTemplate, masterPosition, master, enemy,
spawningCycle):
self._internalCounter = 0
self._exitLocations = []
self._displacement = 0.0
self._master = master
self._displacement = bulletSpawnerTemplate._displacement
for i in bulletSpawnerTemplate._exitLocations:
self._exitLocations.append(i)
self._rotationSpeed = bulletSpawnerTemplate._rotationSpeed
self._bulletTemplate = bulletSpawnerTemplate._bulletTemplate
self._spawningCycle = enemy._spawningCycle
self._seenCycle = enemy._spawningCycle
self._deathCycle = enemy._deathCycle
self._sprayTimer = bulletSpawnerTemplate._sprayTimer
self._initialDelay = bulletSpawnerTemplate._initialDelay
try:
self._lengthOfSpray = max(self._sprayTimer)
except ValueError:
self._lengthOfSpray = 0
self._inBetweenTimer = bulletSpawnerTemplate._inBetweenTimer
self._rounds = bulletSpawnerTemplate._rounds
super().__init__(bulletSpawnerTemplate._initialPosition,
bulletSpawnerTemplate._initialVelocity, spawningCycle)
self.calculatePositions(master, master._playerPosition, None,
masterPosition)
self._maskName = bulletSpawnerTemplate._maskName
self._maskLayer = bulletSpawnerTemplate._maskLayer
def calculateBullets(self):
returnList = []
mode = 'initialDelayMode'
switchCounter = -1
currentRound = 0
for i in self._positionList:
self._internalCounter = self._internalCounter + 1
switchCounter = switchCounter + 1
if mode == 'initialDelayMode':
if switchCounter >= self._initialDelay:
mode = 'sprayMode'
switchCounter = -1
self._seenCycle = (self._spawningCycle + self.
_internalCounter)
elif mode == 'sprayMode':
if switchCounter in self._sprayTimer:
for j in self._exitLocations:
offset = CUS_Polar(self._displacement, j)
pos = CUS_Point(0.0, 0.0)
pos.add(toPoint(offset))
pos._x = pos._x + i._x
pos._y = pos._y + i._y
bullet = Bullet(self._bulletTemplate, pos, j, self.
_master, self._spawningCycle + self.
_internalCounter)
returnList.append(bullet)
if switchCounter >= self._lengthOfSpray:
mode = 'inBetweenTimerMode'
currentRound = currentRound + 1
switchCounter = -1
elif mode == 'inBetweenTimerMode':
if switchCounter >= self._inBetweenTimer:
mode = 'sprayMode'
switchCounter = -1
            if currentRound >= self._rounds and self._rounds != -1:
mode = 'sprayOverMode'
self._deathCycle = (self._spawningCycle + self.
_internalCounter)
return returnList
class BulletMaster(object):
def __init__(self, bulletMasterTemplate, masterPositionList, master,
enemy, spawningCycle):
self._name = bulletMasterTemplate._name
self._bulletSpawners = []
for i in bulletMasterTemplate._bulletSpawnerTemplates:
self._bulletSpawners.append(BulletSpawner(i, masterPositionList,
master, enemy, spawningCycle))
def calculateBullets(self):
returnList = []
for i in self._bulletSpawners:
returnList.extend(i.calculateBullets())
return returnList
<|reserved_special_token_1|>
#classes that store values related to levels
from mg_cus_struct import *
from mg_movement import *
import copy
class BulletTemplate(object) :
def __init__(self, animationName, initialVelocity, hitbox) :
self._spawningCycle = 0
self._animationName = animationName
self._initialVelocity = initialVelocity
self._movementList = dict()
self._hitbox = hitbox
def addMovementCommand(self, cycle, movementCommand) :
self._movementList[cycle] = movementCommand
class BulletSpawnerTemplate(object) :
def __init__(self, initialPosition, initialVelocity) :
self._spawningCycle = 0
self._initialPosition = initialPosition
self._initialVelocity = initialVelocity
self._movementList = dict()
self._displacement = 0
self._exitLocations = []
self._rotationSpeed = 0
self._initialDelay = 0
self._sprayTimer = []
self._inBetweenTimer = 0
self._rounds = -1
self._bulletTemplate = None
#mask
self._maskName = ""
self._maskLayer = 0
def addSprayTimer(self, sprayTimer) :
self._sprayTimer.extend(sprayTimer)
def setRounds(self, rounds) :
self._rounds = rounds
def setInitialDelay(self, initialDelay) :
self._initialDelay = initialDelay
def setInBetweenTimer(self, delay) :
self._inBetweenTimer = delay
def addExitLocation(self, location) :
self._exitLocations.append(location)
def addBulletTemplate(self, bulletTemplate) :
self._bulletTemplate = bulletTemplate
def addMovementCommand(self, cycle, movementCommand) :
self._movementList[cycle] = movementCommand
def addMask(self, maskName, maskLayer) :
self._maskName = maskName
self._maskLayer = maskLayer
class BulletMasterTemplate(object) :
def __init__(self, name) :
self._name = name
self._bulletSpawnerTemplates = []
self._powerUpTable = {
"life" : 0,
"power" : 0,
"spell" : 0,
"points" : 0,
}
def addBulletSpawnerTemplates(self, bulletSpawnerTemplate) :
self._bulletSpawnerTemplates.append(bulletSpawnerTemplate)
class Bullet(MovementCommander) :
def __init__(self, bulletTemplate, position, exitAngle, master, spawningCycle) :
temp = copy.deepcopy(bulletTemplate._initialVelocity)
temp._angle = temp._angle + exitAngle
super().__init__(position, temp, spawningCycle)
self.addStartingParameters(position, temp)
self._animationName = bulletTemplate._animationName
for i in bulletTemplate._movementList :
self.addMovementCommandDirect(i, bulletTemplate._movementList[i])
self.calculatePositions(master, master._playerPosition, [-100, -100, 1620, 1180], None)
class BulletSpawner(MovementCommander) :
def __init__(self, bulletSpawnerTemplate, masterPosition, master, enemy, spawningCycle) :
self._internalCounter = 0
self._exitLocations = []
self._displacement = 0.0
self._master = master
self._displacement = bulletSpawnerTemplate._displacement
for i in bulletSpawnerTemplate._exitLocations :
self._exitLocations.append(i)
self._rotationSpeed = bulletSpawnerTemplate._rotationSpeed
self._bulletTemplate = bulletSpawnerTemplate._bulletTemplate
self._spawningCycle = enemy._spawningCycle
self._seenCycle = enemy._spawningCycle
self._deathCycle = enemy._deathCycle
self._sprayTimer = bulletSpawnerTemplate._sprayTimer
self._initialDelay = bulletSpawnerTemplate._initialDelay
try :
self._lengthOfSpray = max(self._sprayTimer)
except ValueError:
self._lengthOfSpray = 0
self._inBetweenTimer = bulletSpawnerTemplate._inBetweenTimer
self._rounds = bulletSpawnerTemplate._rounds
super().__init__(bulletSpawnerTemplate._initialPosition, bulletSpawnerTemplate._initialVelocity, spawningCycle)
self.calculatePositions(master, master._playerPosition, None, masterPosition)
#apply masks
self._maskName = bulletSpawnerTemplate._maskName
self._maskLayer = bulletSpawnerTemplate._maskLayer
def calculateBullets(self) :
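        # The loop below is a small per-cycle state machine:
        #   initialDelayMode   - wait self._initialDelay cycles before firing
        #   sprayMode          - on cycles listed in self._sprayTimer, emit one
        #                        Bullet per exit angle in self._exitLocations
        #   inBetweenTimerMode - pause self._inBetweenTimer cycles between rounds
        # Firing ends after self._rounds rounds unless _rounds == -1 (endless).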
returnList = []
mode = "initialDelayMode"
switchCounter = -1
currentRound = 0
for i in self._positionList :
self._internalCounter = self._internalCounter + 1
switchCounter = switchCounter + 1
if mode == "initialDelayMode" :
if switchCounter >= self._initialDelay :
mode = "sprayMode"
switchCounter = -1
self._seenCycle = self._spawningCycle + self._internalCounter
elif mode == "sprayMode" :
if switchCounter in self._sprayTimer :
for j in self._exitLocations :
offset = CUS_Polar(self._displacement, j)
pos = CUS_Point(0.0, 0.0)
pos.add(toPoint(offset))
pos._x = pos._x + i._x
pos._y = pos._y + i._y
bullet = Bullet(self._bulletTemplate, pos, j, self._master, self._spawningCycle+self._internalCounter)
returnList.append(bullet)
if switchCounter >= self._lengthOfSpray :
mode = "inBetweenTimerMode"
currentRound = currentRound + 1
switchCounter = -1
elif mode == "inBetweenTimerMode" :
if switchCounter >= self._inBetweenTimer :
mode = "sprayMode"
switchCounter = -1
            if currentRound >= self._rounds and self._rounds != -1 :
mode = "sprayOverMode"
self._deathCycle = self._spawningCycle + self._internalCounter
return returnList
class BulletMaster(object) :
def __init__(self, bulletMasterTemplate, masterPositionList, master, enemy, spawningCycle) :
self._name = bulletMasterTemplate._name
self._bulletSpawners = []
for i in bulletMasterTemplate._bulletSpawnerTemplates :
self._bulletSpawners.append(BulletSpawner(i, masterPositionList, master, enemy, spawningCycle))
def calculateBullets(self) :
returnList = []
for i in self._bulletSpawners :
returnList.extend(i.calculateBullets())
return returnList
|
flexible
|
{
"blob_id": "519746450826d02230a492a99e0b518602d53fcb",
"index": 9932,
"step-1": "<mask token>\n\n\nclass BulletSpawnerTemplate(object):\n <mask token>\n <mask token>\n\n def setRounds(self, rounds):\n self._rounds = rounds\n <mask token>\n\n def setInBetweenTimer(self, delay):\n self._inBetweenTimer = delay\n <mask token>\n\n def addBulletTemplate(self, bulletTemplate):\n self._bulletTemplate = bulletTemplate\n\n def addMovementCommand(self, cycle, movementCommand):\n self._movementList[cycle] = movementCommand\n <mask token>\n\n\nclass BulletMasterTemplate(object):\n\n def __init__(self, name):\n self._name = name\n self._bulletSpawnerTemplates = []\n self._powerUpTable = {'life': 0, 'power': 0, 'spell': 0, 'points': 0}\n\n def addBulletSpawnerTemplates(self, bulletSpawnerTemplate):\n self._bulletSpawnerTemplates.append(bulletSpawnerTemplate)\n\n\nclass Bullet(MovementCommander):\n\n def __init__(self, bulletTemplate, position, exitAngle, master,\n spawningCycle):\n temp = copy.deepcopy(bulletTemplate._initialVelocity)\n temp._angle = temp._angle + exitAngle\n super().__init__(position, temp, spawningCycle)\n self.addStartingParameters(position, temp)\n self._animationName = bulletTemplate._animationName\n for i in bulletTemplate._movementList:\n self.addMovementCommandDirect(i, bulletTemplate._movementList[i])\n self.calculatePositions(master, master._playerPosition, [-100, -100,\n 1620, 1180], None)\n\n\nclass BulletSpawner(MovementCommander):\n\n def __init__(self, bulletSpawnerTemplate, masterPosition, master, enemy,\n spawningCycle):\n self._internalCounter = 0\n self._exitLocations = []\n self._displacement = 0.0\n self._master = master\n self._displacement = bulletSpawnerTemplate._displacement\n for i in bulletSpawnerTemplate._exitLocations:\n self._exitLocations.append(i)\n self._rotationSpeed = bulletSpawnerTemplate._rotationSpeed\n self._bulletTemplate = bulletSpawnerTemplate._bulletTemplate\n self._spawningCycle = enemy._spawningCycle\n self._seenCycle = enemy._spawningCycle\n self._deathCycle = enemy._deathCycle\n self._sprayTimer = bulletSpawnerTemplate._sprayTimer\n self._initialDelay = bulletSpawnerTemplate._initialDelay\n try:\n self._lengthOfSpray = max(self._sprayTimer)\n except ValueError:\n self._lengthOfSpray = 0\n self._inBetweenTimer = bulletSpawnerTemplate._inBetweenTimer\n self._rounds = bulletSpawnerTemplate._rounds\n super().__init__(bulletSpawnerTemplate._initialPosition,\n bulletSpawnerTemplate._initialVelocity, spawningCycle)\n self.calculatePositions(master, master._playerPosition, None,\n masterPosition)\n self._maskName = bulletSpawnerTemplate._maskName\n self._maskLayer = bulletSpawnerTemplate._maskLayer\n\n def calculateBullets(self):\n returnList = []\n mode = 'initialDelayMode'\n switchCounter = -1\n currentRound = 0\n for i in self._positionList:\n self._internalCounter = self._internalCounter + 1\n switchCounter = switchCounter + 1\n if mode == 'initialDelayMode':\n if switchCounter >= self._initialDelay:\n mode = 'sprayMode'\n switchCounter = -1\n self._seenCycle = (self._spawningCycle + self.\n _internalCounter)\n elif mode == 'sprayMode':\n if switchCounter in self._sprayTimer:\n for j in self._exitLocations:\n offset = CUS_Polar(self._displacement, j)\n pos = CUS_Point(0.0, 0.0)\n pos.add(toPoint(offset))\n pos._x = pos._x + i._x\n pos._y = pos._y + i._y\n bullet = Bullet(self._bulletTemplate, pos, j, self.\n _master, self._spawningCycle + self.\n _internalCounter)\n returnList.append(bullet)\n if switchCounter >= self._lengthOfSpray:\n mode = 'inBetweenTimerMode'\n currentRound = currentRound + 1\n switchCounter = 
-1\n elif mode == 'inBetweenTimerMode':\n if switchCounter >= self._inBetweenTimer:\n mode = 'sprayMode'\n switchCounter = -1\n if currentRound >= self._rounds and self._rounds is not -1:\n mode = 'sprayOverMode'\n self._deathCycle = (self._spawningCycle + self.\n _internalCounter)\n return returnList\n\n\nclass BulletMaster(object):\n\n def __init__(self, bulletMasterTemplate, masterPositionList, master,\n enemy, spawningCycle):\n self._name = bulletMasterTemplate._name\n self._bulletSpawners = []\n for i in bulletMasterTemplate._bulletSpawnerTemplates:\n self._bulletSpawners.append(BulletSpawner(i, masterPositionList,\n master, enemy, spawningCycle))\n\n def calculateBullets(self):\n returnList = []\n for i in self._bulletSpawners:\n returnList.extend(i.calculateBullets())\n return returnList\n",
"step-2": "<mask token>\n\n\nclass BulletSpawnerTemplate(object):\n\n def __init__(self, initialPosition, initialVelocity):\n self._spawningCycle = 0\n self._initialPosition = initialPosition\n self._initialVelocity = initialVelocity\n self._movementList = dict()\n self._displacement = 0\n self._exitLocations = []\n self._rotationSpeed = 0\n self._initialDelay = 0\n self._sprayTimer = []\n self._inBetweenTimer = 0\n self._rounds = -1\n self._bulletTemplate = None\n self._maskName = ''\n self._maskLayer = 0\n <mask token>\n\n def setRounds(self, rounds):\n self._rounds = rounds\n\n def setInitialDelay(self, initialDelay):\n self._initialDelay = initialDelay\n\n def setInBetweenTimer(self, delay):\n self._inBetweenTimer = delay\n <mask token>\n\n def addBulletTemplate(self, bulletTemplate):\n self._bulletTemplate = bulletTemplate\n\n def addMovementCommand(self, cycle, movementCommand):\n self._movementList[cycle] = movementCommand\n\n def addMask(self, maskName, maskLayer):\n self._maskName = maskName\n self._maskLayer = maskLayer\n\n\nclass BulletMasterTemplate(object):\n\n def __init__(self, name):\n self._name = name\n self._bulletSpawnerTemplates = []\n self._powerUpTable = {'life': 0, 'power': 0, 'spell': 0, 'points': 0}\n\n def addBulletSpawnerTemplates(self, bulletSpawnerTemplate):\n self._bulletSpawnerTemplates.append(bulletSpawnerTemplate)\n\n\nclass Bullet(MovementCommander):\n\n def __init__(self, bulletTemplate, position, exitAngle, master,\n spawningCycle):\n temp = copy.deepcopy(bulletTemplate._initialVelocity)\n temp._angle = temp._angle + exitAngle\n super().__init__(position, temp, spawningCycle)\n self.addStartingParameters(position, temp)\n self._animationName = bulletTemplate._animationName\n for i in bulletTemplate._movementList:\n self.addMovementCommandDirect(i, bulletTemplate._movementList[i])\n self.calculatePositions(master, master._playerPosition, [-100, -100,\n 1620, 1180], None)\n\n\nclass BulletSpawner(MovementCommander):\n\n def __init__(self, bulletSpawnerTemplate, masterPosition, master, enemy,\n spawningCycle):\n self._internalCounter = 0\n self._exitLocations = []\n self._displacement = 0.0\n self._master = master\n self._displacement = bulletSpawnerTemplate._displacement\n for i in bulletSpawnerTemplate._exitLocations:\n self._exitLocations.append(i)\n self._rotationSpeed = bulletSpawnerTemplate._rotationSpeed\n self._bulletTemplate = bulletSpawnerTemplate._bulletTemplate\n self._spawningCycle = enemy._spawningCycle\n self._seenCycle = enemy._spawningCycle\n self._deathCycle = enemy._deathCycle\n self._sprayTimer = bulletSpawnerTemplate._sprayTimer\n self._initialDelay = bulletSpawnerTemplate._initialDelay\n try:\n self._lengthOfSpray = max(self._sprayTimer)\n except ValueError:\n self._lengthOfSpray = 0\n self._inBetweenTimer = bulletSpawnerTemplate._inBetweenTimer\n self._rounds = bulletSpawnerTemplate._rounds\n super().__init__(bulletSpawnerTemplate._initialPosition,\n bulletSpawnerTemplate._initialVelocity, spawningCycle)\n self.calculatePositions(master, master._playerPosition, None,\n masterPosition)\n self._maskName = bulletSpawnerTemplate._maskName\n self._maskLayer = bulletSpawnerTemplate._maskLayer\n\n def calculateBullets(self):\n returnList = []\n mode = 'initialDelayMode'\n switchCounter = -1\n currentRound = 0\n for i in self._positionList:\n self._internalCounter = self._internalCounter + 1\n switchCounter = switchCounter + 1\n if mode == 'initialDelayMode':\n if switchCounter >= self._initialDelay:\n mode = 'sprayMode'\n switchCounter = 
-1\n self._seenCycle = (self._spawningCycle + self.\n _internalCounter)\n elif mode == 'sprayMode':\n if switchCounter in self._sprayTimer:\n for j in self._exitLocations:\n offset = CUS_Polar(self._displacement, j)\n pos = CUS_Point(0.0, 0.0)\n pos.add(toPoint(offset))\n pos._x = pos._x + i._x\n pos._y = pos._y + i._y\n bullet = Bullet(self._bulletTemplate, pos, j, self.\n _master, self._spawningCycle + self.\n _internalCounter)\n returnList.append(bullet)\n if switchCounter >= self._lengthOfSpray:\n mode = 'inBetweenTimerMode'\n currentRound = currentRound + 1\n switchCounter = -1\n elif mode == 'inBetweenTimerMode':\n if switchCounter >= self._inBetweenTimer:\n mode = 'sprayMode'\n switchCounter = -1\n if currentRound >= self._rounds and self._rounds is not -1:\n mode = 'sprayOverMode'\n self._deathCycle = (self._spawningCycle + self.\n _internalCounter)\n return returnList\n\n\nclass BulletMaster(object):\n\n def __init__(self, bulletMasterTemplate, masterPositionList, master,\n enemy, spawningCycle):\n self._name = bulletMasterTemplate._name\n self._bulletSpawners = []\n for i in bulletMasterTemplate._bulletSpawnerTemplates:\n self._bulletSpawners.append(BulletSpawner(i, masterPositionList,\n master, enemy, spawningCycle))\n\n def calculateBullets(self):\n returnList = []\n for i in self._bulletSpawners:\n returnList.extend(i.calculateBullets())\n return returnList\n",
"step-3": "<mask token>\n\n\nclass BulletTemplate(object):\n <mask token>\n <mask token>\n\n\nclass BulletSpawnerTemplate(object):\n\n def __init__(self, initialPosition, initialVelocity):\n self._spawningCycle = 0\n self._initialPosition = initialPosition\n self._initialVelocity = initialVelocity\n self._movementList = dict()\n self._displacement = 0\n self._exitLocations = []\n self._rotationSpeed = 0\n self._initialDelay = 0\n self._sprayTimer = []\n self._inBetweenTimer = 0\n self._rounds = -1\n self._bulletTemplate = None\n self._maskName = ''\n self._maskLayer = 0\n\n def addSprayTimer(self, sprayTimer):\n self._sprayTimer.extend(sprayTimer)\n\n def setRounds(self, rounds):\n self._rounds = rounds\n\n def setInitialDelay(self, initialDelay):\n self._initialDelay = initialDelay\n\n def setInBetweenTimer(self, delay):\n self._inBetweenTimer = delay\n\n def addExitLocation(self, location):\n self._exitLocations.append(location)\n\n def addBulletTemplate(self, bulletTemplate):\n self._bulletTemplate = bulletTemplate\n\n def addMovementCommand(self, cycle, movementCommand):\n self._movementList[cycle] = movementCommand\n\n def addMask(self, maskName, maskLayer):\n self._maskName = maskName\n self._maskLayer = maskLayer\n\n\nclass BulletMasterTemplate(object):\n\n def __init__(self, name):\n self._name = name\n self._bulletSpawnerTemplates = []\n self._powerUpTable = {'life': 0, 'power': 0, 'spell': 0, 'points': 0}\n\n def addBulletSpawnerTemplates(self, bulletSpawnerTemplate):\n self._bulletSpawnerTemplates.append(bulletSpawnerTemplate)\n\n\nclass Bullet(MovementCommander):\n\n def __init__(self, bulletTemplate, position, exitAngle, master,\n spawningCycle):\n temp = copy.deepcopy(bulletTemplate._initialVelocity)\n temp._angle = temp._angle + exitAngle\n super().__init__(position, temp, spawningCycle)\n self.addStartingParameters(position, temp)\n self._animationName = bulletTemplate._animationName\n for i in bulletTemplate._movementList:\n self.addMovementCommandDirect(i, bulletTemplate._movementList[i])\n self.calculatePositions(master, master._playerPosition, [-100, -100,\n 1620, 1180], None)\n\n\nclass BulletSpawner(MovementCommander):\n\n def __init__(self, bulletSpawnerTemplate, masterPosition, master, enemy,\n spawningCycle):\n self._internalCounter = 0\n self._exitLocations = []\n self._displacement = 0.0\n self._master = master\n self._displacement = bulletSpawnerTemplate._displacement\n for i in bulletSpawnerTemplate._exitLocations:\n self._exitLocations.append(i)\n self._rotationSpeed = bulletSpawnerTemplate._rotationSpeed\n self._bulletTemplate = bulletSpawnerTemplate._bulletTemplate\n self._spawningCycle = enemy._spawningCycle\n self._seenCycle = enemy._spawningCycle\n self._deathCycle = enemy._deathCycle\n self._sprayTimer = bulletSpawnerTemplate._sprayTimer\n self._initialDelay = bulletSpawnerTemplate._initialDelay\n try:\n self._lengthOfSpray = max(self._sprayTimer)\n except ValueError:\n self._lengthOfSpray = 0\n self._inBetweenTimer = bulletSpawnerTemplate._inBetweenTimer\n self._rounds = bulletSpawnerTemplate._rounds\n super().__init__(bulletSpawnerTemplate._initialPosition,\n bulletSpawnerTemplate._initialVelocity, spawningCycle)\n self.calculatePositions(master, master._playerPosition, None,\n masterPosition)\n self._maskName = bulletSpawnerTemplate._maskName\n self._maskLayer = bulletSpawnerTemplate._maskLayer\n\n def calculateBullets(self):\n returnList = []\n mode = 'initialDelayMode'\n switchCounter = -1\n currentRound = 0\n for i in self._positionList:\n 
self._internalCounter = self._internalCounter + 1\n switchCounter = switchCounter + 1\n if mode == 'initialDelayMode':\n if switchCounter >= self._initialDelay:\n mode = 'sprayMode'\n switchCounter = -1\n self._seenCycle = (self._spawningCycle + self.\n _internalCounter)\n elif mode == 'sprayMode':\n if switchCounter in self._sprayTimer:\n for j in self._exitLocations:\n offset = CUS_Polar(self._displacement, j)\n pos = CUS_Point(0.0, 0.0)\n pos.add(toPoint(offset))\n pos._x = pos._x + i._x\n pos._y = pos._y + i._y\n bullet = Bullet(self._bulletTemplate, pos, j, self.\n _master, self._spawningCycle + self.\n _internalCounter)\n returnList.append(bullet)\n if switchCounter >= self._lengthOfSpray:\n mode = 'inBetweenTimerMode'\n currentRound = currentRound + 1\n switchCounter = -1\n elif mode == 'inBetweenTimerMode':\n if switchCounter >= self._inBetweenTimer:\n mode = 'sprayMode'\n switchCounter = -1\n if currentRound >= self._rounds and self._rounds is not -1:\n mode = 'sprayOverMode'\n self._deathCycle = (self._spawningCycle + self.\n _internalCounter)\n return returnList\n\n\nclass BulletMaster(object):\n\n def __init__(self, bulletMasterTemplate, masterPositionList, master,\n enemy, spawningCycle):\n self._name = bulletMasterTemplate._name\n self._bulletSpawners = []\n for i in bulletMasterTemplate._bulletSpawnerTemplates:\n self._bulletSpawners.append(BulletSpawner(i, masterPositionList,\n master, enemy, spawningCycle))\n\n def calculateBullets(self):\n returnList = []\n for i in self._bulletSpawners:\n returnList.extend(i.calculateBullets())\n return returnList\n",
"step-4": "from mg_cus_struct import *\nfrom mg_movement import *\nimport copy\n\n\nclass BulletTemplate(object):\n\n def __init__(self, animationName, initialVelocity, hitbox):\n self._spawningCycle = 0\n self._animationName = animationName\n self._initialVelocity = initialVelocity\n self._movementList = dict()\n self._hitbox = hitbox\n\n def addMovementCommand(self, cycle, movementCommand):\n self._movementList[cycle] = movementCommand\n\n\nclass BulletSpawnerTemplate(object):\n\n def __init__(self, initialPosition, initialVelocity):\n self._spawningCycle = 0\n self._initialPosition = initialPosition\n self._initialVelocity = initialVelocity\n self._movementList = dict()\n self._displacement = 0\n self._exitLocations = []\n self._rotationSpeed = 0\n self._initialDelay = 0\n self._sprayTimer = []\n self._inBetweenTimer = 0\n self._rounds = -1\n self._bulletTemplate = None\n self._maskName = ''\n self._maskLayer = 0\n\n def addSprayTimer(self, sprayTimer):\n self._sprayTimer.extend(sprayTimer)\n\n def setRounds(self, rounds):\n self._rounds = rounds\n\n def setInitialDelay(self, initialDelay):\n self._initialDelay = initialDelay\n\n def setInBetweenTimer(self, delay):\n self._inBetweenTimer = delay\n\n def addExitLocation(self, location):\n self._exitLocations.append(location)\n\n def addBulletTemplate(self, bulletTemplate):\n self._bulletTemplate = bulletTemplate\n\n def addMovementCommand(self, cycle, movementCommand):\n self._movementList[cycle] = movementCommand\n\n def addMask(self, maskName, maskLayer):\n self._maskName = maskName\n self._maskLayer = maskLayer\n\n\nclass BulletMasterTemplate(object):\n\n def __init__(self, name):\n self._name = name\n self._bulletSpawnerTemplates = []\n self._powerUpTable = {'life': 0, 'power': 0, 'spell': 0, 'points': 0}\n\n def addBulletSpawnerTemplates(self, bulletSpawnerTemplate):\n self._bulletSpawnerTemplates.append(bulletSpawnerTemplate)\n\n\nclass Bullet(MovementCommander):\n\n def __init__(self, bulletTemplate, position, exitAngle, master,\n spawningCycle):\n temp = copy.deepcopy(bulletTemplate._initialVelocity)\n temp._angle = temp._angle + exitAngle\n super().__init__(position, temp, spawningCycle)\n self.addStartingParameters(position, temp)\n self._animationName = bulletTemplate._animationName\n for i in bulletTemplate._movementList:\n self.addMovementCommandDirect(i, bulletTemplate._movementList[i])\n self.calculatePositions(master, master._playerPosition, [-100, -100,\n 1620, 1180], None)\n\n\nclass BulletSpawner(MovementCommander):\n\n def __init__(self, bulletSpawnerTemplate, masterPosition, master, enemy,\n spawningCycle):\n self._internalCounter = 0\n self._exitLocations = []\n self._displacement = 0.0\n self._master = master\n self._displacement = bulletSpawnerTemplate._displacement\n for i in bulletSpawnerTemplate._exitLocations:\n self._exitLocations.append(i)\n self._rotationSpeed = bulletSpawnerTemplate._rotationSpeed\n self._bulletTemplate = bulletSpawnerTemplate._bulletTemplate\n self._spawningCycle = enemy._spawningCycle\n self._seenCycle = enemy._spawningCycle\n self._deathCycle = enemy._deathCycle\n self._sprayTimer = bulletSpawnerTemplate._sprayTimer\n self._initialDelay = bulletSpawnerTemplate._initialDelay\n try:\n self._lengthOfSpray = max(self._sprayTimer)\n except ValueError:\n self._lengthOfSpray = 0\n self._inBetweenTimer = bulletSpawnerTemplate._inBetweenTimer\n self._rounds = bulletSpawnerTemplate._rounds\n super().__init__(bulletSpawnerTemplate._initialPosition,\n bulletSpawnerTemplate._initialVelocity, 
spawningCycle)\n self.calculatePositions(master, master._playerPosition, None,\n masterPosition)\n self._maskName = bulletSpawnerTemplate._maskName\n self._maskLayer = bulletSpawnerTemplate._maskLayer\n\n def calculateBullets(self):\n returnList = []\n mode = 'initialDelayMode'\n switchCounter = -1\n currentRound = 0\n for i in self._positionList:\n self._internalCounter = self._internalCounter + 1\n switchCounter = switchCounter + 1\n if mode == 'initialDelayMode':\n if switchCounter >= self._initialDelay:\n mode = 'sprayMode'\n switchCounter = -1\n self._seenCycle = (self._spawningCycle + self.\n _internalCounter)\n elif mode == 'sprayMode':\n if switchCounter in self._sprayTimer:\n for j in self._exitLocations:\n offset = CUS_Polar(self._displacement, j)\n pos = CUS_Point(0.0, 0.0)\n pos.add(toPoint(offset))\n pos._x = pos._x + i._x\n pos._y = pos._y + i._y\n bullet = Bullet(self._bulletTemplate, pos, j, self.\n _master, self._spawningCycle + self.\n _internalCounter)\n returnList.append(bullet)\n if switchCounter >= self._lengthOfSpray:\n mode = 'inBetweenTimerMode'\n currentRound = currentRound + 1\n switchCounter = -1\n elif mode == 'inBetweenTimerMode':\n if switchCounter >= self._inBetweenTimer:\n mode = 'sprayMode'\n switchCounter = -1\n if currentRound >= self._rounds and self._rounds is not -1:\n mode = 'sprayOverMode'\n self._deathCycle = (self._spawningCycle + self.\n _internalCounter)\n return returnList\n\n\nclass BulletMaster(object):\n\n def __init__(self, bulletMasterTemplate, masterPositionList, master,\n enemy, spawningCycle):\n self._name = bulletMasterTemplate._name\n self._bulletSpawners = []\n for i in bulletMasterTemplate._bulletSpawnerTemplates:\n self._bulletSpawners.append(BulletSpawner(i, masterPositionList,\n master, enemy, spawningCycle))\n\n def calculateBullets(self):\n returnList = []\n for i in self._bulletSpawners:\n returnList.extend(i.calculateBullets())\n return returnList\n",
"step-5": "#classes that store values related to levels\nfrom mg_cus_struct import *\nfrom mg_movement import *\nimport copy\n\nclass BulletTemplate(object) :\n def __init__(self, animationName, initialVelocity, hitbox) :\n self._spawningCycle = 0\n self._animationName = animationName\n self._initialVelocity = initialVelocity\n self._movementList = dict()\n self._hitbox = hitbox\n\n def addMovementCommand(self, cycle, movementCommand) :\n self._movementList[cycle] = movementCommand\n\nclass BulletSpawnerTemplate(object) :\n def __init__(self, initialPosition, initialVelocity) :\n self._spawningCycle = 0\n self._initialPosition = initialPosition\n self._initialVelocity = initialVelocity\n \n self._movementList = dict()\n \n self._displacement = 0\n self._exitLocations = []\n self._rotationSpeed = 0\n \n self._initialDelay = 0\n self._sprayTimer = []\n self._inBetweenTimer = 0\n self._rounds = -1\n \n self._bulletTemplate = None\n\n #mask\n self._maskName = \"\"\n self._maskLayer = 0\n\n def addSprayTimer(self, sprayTimer) :\n self._sprayTimer.extend(sprayTimer)\n\n def setRounds(self, rounds) :\n self._rounds = rounds\n\n def setInitialDelay(self, initialDelay) :\n self._initialDelay = initialDelay\n\n def setInBetweenTimer(self, delay) :\n self._inBetweenTimer = delay\n\n def addExitLocation(self, location) :\n self._exitLocations.append(location)\n\n def addBulletTemplate(self, bulletTemplate) :\n self._bulletTemplate = bulletTemplate\n\n def addMovementCommand(self, cycle, movementCommand) :\n self._movementList[cycle] = movementCommand\n\n def addMask(self, maskName, maskLayer) :\n self._maskName = maskName\n self._maskLayer = maskLayer\n \nclass BulletMasterTemplate(object) :\n def __init__(self, name) :\n self._name = name\n self._bulletSpawnerTemplates = []\n self._powerUpTable = {\n \"life\" : 0,\n \"power\" : 0,\n \"spell\" : 0,\n \"points\" : 0, \n }\n\n def addBulletSpawnerTemplates(self, bulletSpawnerTemplate) :\n self._bulletSpawnerTemplates.append(bulletSpawnerTemplate)\n \nclass Bullet(MovementCommander) :\n def __init__(self, bulletTemplate, position, exitAngle, master, spawningCycle) :\n temp = copy.deepcopy(bulletTemplate._initialVelocity)\n temp._angle = temp._angle + exitAngle\n\n super().__init__(position, temp, spawningCycle)\n self.addStartingParameters(position, temp)\n\n self._animationName = bulletTemplate._animationName\n\n for i in bulletTemplate._movementList :\n self.addMovementCommandDirect(i, bulletTemplate._movementList[i])\n\n self.calculatePositions(master, master._playerPosition, [-100, -100, 1620, 1180], None)\n\nclass BulletSpawner(MovementCommander) :\n def __init__(self, bulletSpawnerTemplate, masterPosition, master, enemy, spawningCycle) :\n self._internalCounter = 0\n self._exitLocations = []\n self._displacement = 0.0\n\n self._master = master\n \n self._displacement = bulletSpawnerTemplate._displacement \n for i in bulletSpawnerTemplate._exitLocations :\n self._exitLocations.append(i)\n self._rotationSpeed = bulletSpawnerTemplate._rotationSpeed\n self._bulletTemplate = bulletSpawnerTemplate._bulletTemplate\n\n self._spawningCycle = enemy._spawningCycle\n self._seenCycle = enemy._spawningCycle\n self._deathCycle = enemy._deathCycle\n \n self._sprayTimer = bulletSpawnerTemplate._sprayTimer\n self._initialDelay = bulletSpawnerTemplate._initialDelay\n \n try :\n self._lengthOfSpray = max(self._sprayTimer)\n except ValueError:\n self._lengthOfSpray = 0\n \n self._inBetweenTimer = bulletSpawnerTemplate._inBetweenTimer\n self._rounds = 
bulletSpawnerTemplate._rounds\n\n super().__init__(bulletSpawnerTemplate._initialPosition, bulletSpawnerTemplate._initialVelocity, spawningCycle)\n \n self.calculatePositions(master, master._playerPosition, None, masterPosition)\n\n #apply masks\n self._maskName = bulletSpawnerTemplate._maskName\n self._maskLayer = bulletSpawnerTemplate._maskLayer\n\n def calculateBullets(self) :\n returnList = []\n mode = \"initialDelayMode\"\n switchCounter = -1\n currentRound = 0\n for i in self._positionList :\n self._internalCounter = self._internalCounter + 1\n switchCounter = switchCounter + 1\n if mode == \"initialDelayMode\" :\n if switchCounter >= self._initialDelay :\n mode = \"sprayMode\"\n switchCounter = -1\n self._seenCycle = self._spawningCycle + self._internalCounter\n elif mode == \"sprayMode\" :\n if switchCounter in self._sprayTimer :\n for j in self._exitLocations :\n offset = CUS_Polar(self._displacement, j)\n pos = CUS_Point(0.0, 0.0)\n pos.add(toPoint(offset))\n pos._x = pos._x + i._x\n pos._y = pos._y + i._y\n bullet = Bullet(self._bulletTemplate, pos, j, self._master, self._spawningCycle+self._internalCounter)\n returnList.append(bullet)\n if switchCounter >= self._lengthOfSpray :\n mode = \"inBetweenTimerMode\"\n currentRound = currentRound + 1\n switchCounter = -1\n elif mode == \"inBetweenTimerMode\" :\n if switchCounter >= self._inBetweenTimer :\n mode = \"sprayMode\"\n switchCounter = -1\n if currentRound >= self._rounds and self._rounds is not -1 :\n mode = \"sprayOverMode\"\n self._deathCycle = self._spawningCycle + self._internalCounter\n\n return returnList\n \nclass BulletMaster(object) :\n def __init__(self, bulletMasterTemplate, masterPositionList, master, enemy, spawningCycle) :\n self._name = bulletMasterTemplate._name\n\n self._bulletSpawners = []\n\n for i in bulletMasterTemplate._bulletSpawnerTemplates :\n self._bulletSpawners.append(BulletSpawner(i, masterPositionList, master, enemy, spawningCycle))\n\n def calculateBullets(self) :\n returnList = []\n for i in self._bulletSpawners :\n returnList.extend(i.calculateBullets())\n\n return returnList\n \n\n \n \n",
"step-ids": [
16,
19,
22,
25,
26
]
}
|
[
16,
19,
22,
25,
26
] |
import requests
import json
import pandas as pd
from sqlalchemy import create_engine
from sqlalchemy.types import VARCHAR,INT,FLOAT,BIGINT
import time
from tqdm import tqdm
#database connection settings
connect_info = 'mysql+pymysql://root:rootroot@localhost:3306/db1?charset=UTF8MB4'
engine = create_engine(connect_info)
sql = '''
select * from smzdm;
'''
#read the scraped comments back from the database
df = pd.read_sql_query(sql, engine)
#drop comments shorter than 5 characters
df_new = df[df['comment'].str.len()>=5]
#configure the Baidu sentiment analysis API (fill in your own credentials)
host = 'https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id=YOUR_CLIENT_ID&client_secret=YOUR_CLIENT_SECRET'
response = requests.get(host)
if response:
print(response.json())
access_token = response.json()['access_token']
url = 'https://aip.baidubce.com/rpc/2.0/nlp/v1/sentiment_classify?access_token='+access_token
print(url)
headers={'Content-Type':'application/json'}
#sentiment analysis function
def sentiment(text):
    global url
    global headers
    body={'text':text}
    try:
        r = requests.post(url,headers = headers,data=json.dumps(body))
        dic=r.json()
    except Exception as e:
        print('analysis failed:', e)
        return None
    time.sleep(0.3)#throttle the request rate; otherwise the API raises QPS quota errors
    return dic['items'][0]['sentiment']
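#register tqdm with pandas so progress_apply is available below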
tqdm.pandas()
df_new_senti = df_new.copy()
df_new_senti['sentiment'] = df_new['comment'].progress_apply(sentiment)#progress_apply shows a tqdm progress bar
df_new_senti.sort_values(by='author',inplace=True)
df_new_senti['id']=df_new_senti.index
#save the scored comments back to the database
df_new_senti.to_sql(name = 'smzdm_senti',con = engine,if_exists = 'replace',index = False,dtype = {'id':BIGINT,'author': VARCHAR(length=255),'comment':VARCHAR(length=255),'sentiment':FLOAT(12,10)})
|
normal
|
{
"blob_id": "a95e64877a1fc9f8109f1293b4ae9176f4f64647",
"index": 3090,
"step-1": "<mask token>\n\n\ndef sentiment(text):\n global url\n global headers\n body = {'text': text}\n try:\n r = requests.post(url, headers=headers, data=json.dumps(body))\n dic = r.json()\n except Exception as e:\n print('分析失败')\n pass\n time.sleep(0.3)\n return dic['items'][0]['sentiment']\n\n\n<mask token>\n",
"step-2": "<mask token>\nif response:\n print(response.json())\n<mask token>\nprint(url)\n<mask token>\n\n\ndef sentiment(text):\n global url\n global headers\n body = {'text': text}\n try:\n r = requests.post(url, headers=headers, data=json.dumps(body))\n dic = r.json()\n except Exception as e:\n print('分析失败')\n pass\n time.sleep(0.3)\n return dic['items'][0]['sentiment']\n\n\ntqdm.pandas()\n<mask token>\ndf_new_senti.sort_values(by='author', inplace=True)\n<mask token>\ndf_new_senti.to_sql(name='smzdm_senti', con=engine, if_exists='replace',\n index=False, dtype={'id': BIGINT, 'author': VARCHAR(length=255),\n 'comment': VARCHAR(length=255), 'sentiment': FLOAT(12, 10)})\n",
"step-3": "<mask token>\nconnect_info = (\n 'mysql+pymysql://root:rootroot@localhost:3306/db1?charset=UTF8MB4')\nengine = create_engine(connect_info)\nsql = \"\"\"\n select * from smzdm;\n \"\"\"\ndf = pd.read_sql_query(sql, engine)\ndf_new = df[df['comment'].str.len() >= 5]\nhost = (\n 'https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id=你的client_id&client_secret=你的client_secret'\n )\nresponse = requests.get(host)\nif response:\n print(response.json())\naccess_token = response.json()['access_token']\nurl = (\n 'https://aip.baidubce.com/rpc/2.0/nlp/v1/sentiment_classify?access_token='\n + access_token)\nprint(url)\nheaders = {'Content-Type': 'application/json'}\n\n\ndef sentiment(text):\n global url\n global headers\n body = {'text': text}\n try:\n r = requests.post(url, headers=headers, data=json.dumps(body))\n dic = r.json()\n except Exception as e:\n print('分析失败')\n pass\n time.sleep(0.3)\n return dic['items'][0]['sentiment']\n\n\ntqdm.pandas()\ndf_new_senti = df_new.copy()\ndf_new_senti['sentiment'] = df_new['comment'].progress_apply(sentiment)\ndf_new_senti.sort_values(by='author', inplace=True)\ndf_new_senti['id'] = df_new_senti.index\ndf_new_senti.to_sql(name='smzdm_senti', con=engine, if_exists='replace',\n index=False, dtype={'id': BIGINT, 'author': VARCHAR(length=255),\n 'comment': VARCHAR(length=255), 'sentiment': FLOAT(12, 10)})\n",
"step-4": "import requests\nimport json\nimport pandas as pd\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.types import VARCHAR, INT, FLOAT, BIGINT\nimport time\nfrom tqdm import tqdm\nconnect_info = (\n 'mysql+pymysql://root:rootroot@localhost:3306/db1?charset=UTF8MB4')\nengine = create_engine(connect_info)\nsql = \"\"\"\n select * from smzdm;\n \"\"\"\ndf = pd.read_sql_query(sql, engine)\ndf_new = df[df['comment'].str.len() >= 5]\nhost = (\n 'https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id=你的client_id&client_secret=你的client_secret'\n )\nresponse = requests.get(host)\nif response:\n print(response.json())\naccess_token = response.json()['access_token']\nurl = (\n 'https://aip.baidubce.com/rpc/2.0/nlp/v1/sentiment_classify?access_token='\n + access_token)\nprint(url)\nheaders = {'Content-Type': 'application/json'}\n\n\ndef sentiment(text):\n global url\n global headers\n body = {'text': text}\n try:\n r = requests.post(url, headers=headers, data=json.dumps(body))\n dic = r.json()\n except Exception as e:\n print('分析失败')\n pass\n time.sleep(0.3)\n return dic['items'][0]['sentiment']\n\n\ntqdm.pandas()\ndf_new_senti = df_new.copy()\ndf_new_senti['sentiment'] = df_new['comment'].progress_apply(sentiment)\ndf_new_senti.sort_values(by='author', inplace=True)\ndf_new_senti['id'] = df_new_senti.index\ndf_new_senti.to_sql(name='smzdm_senti', con=engine, if_exists='replace',\n index=False, dtype={'id': BIGINT, 'author': VARCHAR(length=255),\n 'comment': VARCHAR(length=255), 'sentiment': FLOAT(12, 10)})\n",
"step-5": "import requests \nimport json\nimport pandas as pd\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.types import VARCHAR,INT,FLOAT,BIGINT\nimport time\nfrom tqdm import tqdm\n#数据库联接设置\nconnect_info = 'mysql+pymysql://root:rootroot@localhost:3306/db1?charset=UTF8MB4'\nengine = create_engine(connect_info) \nsql = '''\n select * from smzdm;\n '''\n#从数据库中读取数据\ndf = pd.read_sql_query(sql, engine)\n#排除字数小于5的评论\ndf_new = df[df['comment'].str.len()>=5]\n#设置百度情感分析api\nhost = 'https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id=你的client_id&client_secret=你的client_secret'\nresponse = requests.get(host)\nif response:\n print(response.json())\naccess_token = response.json()['access_token']\nurl = 'https://aip.baidubce.com/rpc/2.0/nlp/v1/sentiment_classify?access_token='+access_token\nprint(url)\nheaders={'Content-Type':'application/json'}\n\n#情感分析函数\ndef sentiment(text):\n global url\n global headers\n body={'text':text}\n try:\n r = requests.post(url,headers = headers,data=json.dumps(body))\n dic=r.json()\n except Exception as e:\n print('分析失败')\n pass\n time.sleep(0.3)#设置分析频率,不设置引发QPS超限额错误\n return dic['items'][0]['sentiment']\n\ntqdm.pandas()\ndf_new_senti = df_new.copy()\ndf_new_senti['sentiment'] = df_new['comment'].progress_apply(sentiment)#使用tqdm进度条\ndf_new_senti.sort_values(by='author',inplace=True)\ndf_new_senti['id']=df_new_senti.index\n#保存到数据库\ndf_new_senti.to_sql(name = 'smzdm_senti',con = engine,if_exists = 'replace',index = False,dtype = {'id':BIGINT,'author': VARCHAR(length=255),'comment':VARCHAR(length=255),'sentiment':FLOAT(12,10)})",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import sqlite3
# cur.execute('CREATE TABLE admin(username TEXT,password TEXT)')
# conn.commit()
# cur.execute("INSERT INTO admin VALUES('nilesh','nilesh')")
# conn.commit()
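# Check an admin login against the admin table. Note: the query is built by
# string formatting, so it is open to SQL injection; a parameterized query
# would be safer.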
def verif_admin(username, password):
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
print(username)
print(password)
data = cur.execute('SELECT password FROM admin WHERE username = "{}"'.format(username)).fetchall()[0][0]
conn.close()
if password == data:
return True
else:
return False
except:
return False
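# Insert a new product after checking that the id is unused and that
# quantity and cost parse as integers.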
def add_product(id_, name, quantity, cost):
    if id_ == '' or name == '' or quantity == '' or cost == '':
return False, " You Cannot Leave It Empty "
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
print(id_, name, quantity, cost)
try:
quantity = int(quantity)
cost = int(cost)
print(id_, name, quantity, cost)
print(type(id_), type(name), type(quantity), type(cost))
check = cur.execute(f"SELECT * FROM products WHERE id = '{id_}'").fetchall()
if len(check) > 0:
return False, " This Product Already Exist Try Updating "
else:
cur.execute('INSERT INTO products VALUES("{}","{}",{},{})'.format(id_, name, quantity, cost))
conn.commit()
conn.close()
return True, " Product Added Successfully "
except:
return False, " Quantity and Cost are Integers "
except:
return False, " Failed Connecting Database "
def get_product_detail(prod_id):
if prod_id == '':
return False, " Enter Product Id "
else:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute(f"SELECT rowid,* FROM products where id='{prod_id}'").fetchall()
conn.close()
if len(data) == 0:
return False, " Product Don't Exist "
return True, data
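# Update or delete the product identified by rowid, depending on qry
# ('update' or 'delete').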
def update_delete_product(rowid, id_, name, quantity, cost, qry):
    if id_ == '' or name == '' or quantity == '' or cost == '':
return False, " You Cannot Leave It Empty "
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
try:
quantity = int(quantity)
cost = int(cost)
if qry == 'update':
cur.execute(
f"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}")
                conn.commit()
                conn.close()
                return True, " Product Updated Successfully "
            if qry == "delete":
                cur.execute(f"DELETE FROM products WHERE rowid={rowid} ")
                conn.commit()
                conn.close()
                return True, " Product Deleted Successfully "
except:
return False, " Quantity and Cost are Integers "
except:
return False, " Failed Connecting Database "
def showProducts_all():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute("SELECT * FROM products").fetchall()
return True, data
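# Add a product to the cart (creating the cart table on first use) or remove
# it, adjusting the stock in products accordingly; returns (ok, message, cart rows).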
def added_to_cart(prod_id, qry):
if prod_id == '':
return False, " Please Enter Product Id ",1
else:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
if qry == "add":
try:
cur.execute("""CREATE TABLE cart(
id TEXT,
name TEXT,
quantity INTEGER,
cost INTEGER) """)
except:
pass
data = cur.execute(f"""SELECT * FROM products WHERE id = '{prod_id}'""").fetchall()
cart_check = cur.execute(f"""SELECT * FROM cart WHERE id = '{prod_id}' """).fetchall()
if len(cart_check) == 0:
cur.execute(f"""INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})""")
conn.commit()
cur.execute(f"""UPDATE products SET quantity = {(data[0][2] - 1)} WHERE id ='{prod_id}'""")
conn.commit()
all_prods = cur.execute("SELECT * FROM cart").fetchall()
return True, " Product Added To Cart Successfully ",all_prods
elif len(cart_check) > 0:
cur.execute(
f"""UPDATE cart SET quantity = {(cart_check[0][2] + 1)},cost={(cart_check[0][3] + data[0][3])} WHERE id ='{prod_id}'""")
conn.commit()
cur.execute(f"""UPDATE products SET quantity = {(data[0][2] - 1)} WHERE id ='{prod_id}'""")
conn.commit()
all_prods = cur.execute("SELECT * FROM cart").fetchall()
return True, " Product Added To Cart Successfully ",all_prods
if qry == "remove":
cart_check = cur.execute(f"""SELECT * FROM cart WHERE id = '{prod_id}' """).fetchall()
if len(cart_check) == 0:
all_prods = cur.execute("SELECT * FROM cart").fetchall()
return True," Product Doesn't Exist ",all_prods
elif len(cart_check) > 0:
data = cur.execute(f"""SELECT * FROM products WHERE id = '{prod_id}'""").fetchall()
cur.execute(f"UPDATE products SET quantity = {(data[0][2]+cart_check[0][2])} WHERE id ='{prod_id}'")
conn.commit()
cur.execute(f"DELETE FROM cart WHERE id = '{prod_id}'")
conn.commit()
all_prods = cur.execute("SELECT * FROM cart").fetchall()
return True," Product Deleted Successfully ",all_prods
conn.close()
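# Sum the cost column over every row currently in the cart.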
def get_cost():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute("SELECT * FROM cart").fetchall()
cost = 0
for i in data:
cost = cost+i[3]
return cost
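# Drop the cart table once the purchase is finished.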
def done_Drp():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
cur.execute("DROP TABLE cart")
conn.commit()
|
normal
|
{
"blob_id": "88d0ced41a8f176a8a12bba6406b4162ea6dfc52",
"index": 9308,
"step-1": "<mask token>\n\n\ndef update_delete_product(rowid, id_, name, quantity, cost, qry):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n try:\n quantity = int(quantity)\n cost = int(cost)\n if qry == 'update':\n cur.execute(\n f\"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}\"\n )\n conn.commit()\n return True, ' Product Updated Successfully '\n if qry == 'delete':\n cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')\n conn.commit()\n return True, ' Product Deleted Successfully '\n conn.commit()\n conn.close()\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\n<mask token>\n\n\ndef added_to_cart(prod_id, qry):\n if prod_id == '':\n return False, ' Please Enter Product Id ', 1\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n if qry == 'add':\n try:\n cur.execute(\n \"\"\"CREATE TABLE cart(\n id TEXT,\n name TEXT,\n quantity INTEGER,\n cost INTEGER) \"\"\"\n )\n except:\n pass\n data = cur.execute(f\"SELECT * FROM products WHERE id = '{prod_id}'\"\n ).fetchall()\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n cur.execute(\n f\"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n elif len(cart_check) > 0:\n cur.execute(\n f\"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n if qry == 'remove':\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, \" Product Doesn't Exist \", all_prods\n elif len(cart_check) > 0:\n data = cur.execute(\n f\"SELECT * FROM products WHERE id = '{prod_id}'\").fetchall(\n )\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] + cart_check[0][2]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(f\"DELETE FROM cart WHERE id = '{prod_id}'\")\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Deleted Successfully ', all_prods\n conn.close()\n\n\n<mask token>\n\n\ndef done_Drp():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n cur.execute('DROP TABLE cart')\n conn.commit()\n",
"step-2": "<mask token>\n\n\ndef add_product(id_, name, quantity, cost):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(id_, name, quantity, cost)\n try:\n quantity = int(quantity)\n cost = int(cost)\n print(id_, name, quantity, cost)\n print(type(id_), type(name), type(quantity), type(cost))\n check = cur.execute(f\"SELECT * FROM products WHERE id = '{id_}'\"\n ).fetchall()\n if len(check) > 0:\n return False, ' This Product Already Exist Try Updating '\n else:\n cur.execute('INSERT INTO products VALUES(\"{}\",\"{}\",{},{})'.\n format(id_, name, quantity, cost))\n conn.commit()\n conn.close()\n return True, ' Product Added Successfully '\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef get_product_detail(prod_id):\n if prod_id == '':\n return False, ' Enter Product Id '\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(f\"SELECT rowid,* FROM products where id='{prod_id}'\"\n ).fetchall()\n conn.close()\n if len(data) == 0:\n return False, \" Product Don't Exist \"\n return True, data\n\n\ndef update_delete_product(rowid, id_, name, quantity, cost, qry):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n try:\n quantity = int(quantity)\n cost = int(cost)\n if qry == 'update':\n cur.execute(\n f\"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}\"\n )\n conn.commit()\n return True, ' Product Updated Successfully '\n if qry == 'delete':\n cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')\n conn.commit()\n return True, ' Product Deleted Successfully '\n conn.commit()\n conn.close()\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef showProducts_all():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute('SELECT * FROM products').fetchall()\n return True, data\n\n\ndef added_to_cart(prod_id, qry):\n if prod_id == '':\n return False, ' Please Enter Product Id ', 1\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n if qry == 'add':\n try:\n cur.execute(\n \"\"\"CREATE TABLE cart(\n id TEXT,\n name TEXT,\n quantity INTEGER,\n cost INTEGER) \"\"\"\n )\n except:\n pass\n data = cur.execute(f\"SELECT * FROM products WHERE id = '{prod_id}'\"\n ).fetchall()\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n cur.execute(\n f\"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n elif len(cart_check) > 0:\n cur.execute(\n f\"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', 
all_prods\n if qry == 'remove':\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, \" Product Doesn't Exist \", all_prods\n elif len(cart_check) > 0:\n data = cur.execute(\n f\"SELECT * FROM products WHERE id = '{prod_id}'\").fetchall(\n )\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] + cart_check[0][2]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(f\"DELETE FROM cart WHERE id = '{prod_id}'\")\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Deleted Successfully ', all_prods\n conn.close()\n\n\n<mask token>\n\n\ndef done_Drp():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n cur.execute('DROP TABLE cart')\n conn.commit()\n",
"step-3": "<mask token>\n\n\ndef verif_admin(username, password):\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(username)\n print(password)\n data = cur.execute('SELECT password FROM admin WHERE username = \"{}\"'\n .format(username)).fetchall()[0][0]\n conn.close()\n if password == data:\n return True\n else:\n return False\n except:\n return False\n\n\ndef add_product(id_, name, quantity, cost):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(id_, name, quantity, cost)\n try:\n quantity = int(quantity)\n cost = int(cost)\n print(id_, name, quantity, cost)\n print(type(id_), type(name), type(quantity), type(cost))\n check = cur.execute(f\"SELECT * FROM products WHERE id = '{id_}'\"\n ).fetchall()\n if len(check) > 0:\n return False, ' This Product Already Exist Try Updating '\n else:\n cur.execute('INSERT INTO products VALUES(\"{}\",\"{}\",{},{})'.\n format(id_, name, quantity, cost))\n conn.commit()\n conn.close()\n return True, ' Product Added Successfully '\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef get_product_detail(prod_id):\n if prod_id == '':\n return False, ' Enter Product Id '\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(f\"SELECT rowid,* FROM products where id='{prod_id}'\"\n ).fetchall()\n conn.close()\n if len(data) == 0:\n return False, \" Product Don't Exist \"\n return True, data\n\n\ndef update_delete_product(rowid, id_, name, quantity, cost, qry):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n try:\n quantity = int(quantity)\n cost = int(cost)\n if qry == 'update':\n cur.execute(\n f\"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}\"\n )\n conn.commit()\n return True, ' Product Updated Successfully '\n if qry == 'delete':\n cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')\n conn.commit()\n return True, ' Product Deleted Successfully '\n conn.commit()\n conn.close()\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef showProducts_all():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute('SELECT * FROM products').fetchall()\n return True, data\n\n\ndef added_to_cart(prod_id, qry):\n if prod_id == '':\n return False, ' Please Enter Product Id ', 1\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n if qry == 'add':\n try:\n cur.execute(\n \"\"\"CREATE TABLE cart(\n id TEXT,\n name TEXT,\n quantity INTEGER,\n cost INTEGER) \"\"\"\n )\n except:\n pass\n data = cur.execute(f\"SELECT * FROM products WHERE id = '{prod_id}'\"\n ).fetchall()\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n cur.execute(\n f\"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n elif len(cart_check) > 0:\n cur.execute(\n 
f\"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n if qry == 'remove':\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, \" Product Doesn't Exist \", all_prods\n elif len(cart_check) > 0:\n data = cur.execute(\n f\"SELECT * FROM products WHERE id = '{prod_id}'\").fetchall(\n )\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] + cart_check[0][2]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(f\"DELETE FROM cart WHERE id = '{prod_id}'\")\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Deleted Successfully ', all_prods\n conn.close()\n\n\n<mask token>\n\n\ndef done_Drp():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n cur.execute('DROP TABLE cart')\n conn.commit()\n",
"step-4": "import sqlite3\n\n\ndef verif_admin(username, password):\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(username)\n print(password)\n data = cur.execute('SELECT password FROM admin WHERE username = \"{}\"'\n .format(username)).fetchall()[0][0]\n conn.close()\n if password == data:\n return True\n else:\n return False\n except:\n return False\n\n\ndef add_product(id_, name, quantity, cost):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(id_, name, quantity, cost)\n try:\n quantity = int(quantity)\n cost = int(cost)\n print(id_, name, quantity, cost)\n print(type(id_), type(name), type(quantity), type(cost))\n check = cur.execute(f\"SELECT * FROM products WHERE id = '{id_}'\"\n ).fetchall()\n if len(check) > 0:\n return False, ' This Product Already Exist Try Updating '\n else:\n cur.execute('INSERT INTO products VALUES(\"{}\",\"{}\",{},{})'.\n format(id_, name, quantity, cost))\n conn.commit()\n conn.close()\n return True, ' Product Added Successfully '\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef get_product_detail(prod_id):\n if prod_id == '':\n return False, ' Enter Product Id '\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(f\"SELECT rowid,* FROM products where id='{prod_id}'\"\n ).fetchall()\n conn.close()\n if len(data) == 0:\n return False, \" Product Don't Exist \"\n return True, data\n\n\ndef update_delete_product(rowid, id_, name, quantity, cost, qry):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n try:\n quantity = int(quantity)\n cost = int(cost)\n if qry == 'update':\n cur.execute(\n f\"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}\"\n )\n conn.commit()\n return True, ' Product Updated Successfully '\n if qry == 'delete':\n cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')\n conn.commit()\n return True, ' Product Deleted Successfully '\n conn.commit()\n conn.close()\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef showProducts_all():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute('SELECT * FROM products').fetchall()\n return True, data\n\n\ndef added_to_cart(prod_id, qry):\n if prod_id == '':\n return False, ' Please Enter Product Id ', 1\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n if qry == 'add':\n try:\n cur.execute(\n \"\"\"CREATE TABLE cart(\n id TEXT,\n name TEXT,\n quantity INTEGER,\n cost INTEGER) \"\"\"\n )\n except:\n pass\n data = cur.execute(f\"SELECT * FROM products WHERE id = '{prod_id}'\"\n ).fetchall()\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n cur.execute(\n f\"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n elif len(cart_check) > 0:\n cur.execute(\n 
f\"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n if qry == 'remove':\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, \" Product Doesn't Exist \", all_prods\n elif len(cart_check) > 0:\n data = cur.execute(\n f\"SELECT * FROM products WHERE id = '{prod_id}'\").fetchall(\n )\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] + cart_check[0][2]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(f\"DELETE FROM cart WHERE id = '{prod_id}'\")\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Deleted Successfully ', all_prods\n conn.close()\n\n\ndef get_cost():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute('SELECT * FROM cart').fetchall()\n cost = 0\n for i in data:\n cost = cost + i[3]\n return cost\n\n\ndef done_Drp():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n cur.execute('DROP TABLE cart')\n conn.commit()\n",
"step-5": "import sqlite3\n\n\n# cur.execute('CREATE TABLE admin(username TEXT,password TEXT)')\n# conn.commit()\n# cur.execute(\"INSERT INTO admin VALUES('nilesh','nilesh')\")\n# conn.commit()\n\ndef verif_admin(username, password):\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(username)\n print(password)\n data = cur.execute('SELECT password FROM admin WHERE username = \"{}\"'.format(username)).fetchall()[0][0]\n\n conn.close()\n if password == data:\n return True\n else:\n return False\n except:\n return False\n\n\ndef add_product(id_, name, quantity, cost):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, \" You Cannot Leave It Empty \"\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(id_, name, quantity, cost)\n try:\n quantity = int(quantity)\n cost = int(cost)\n print(id_, name, quantity, cost)\n print(type(id_), type(name), type(quantity), type(cost))\n check = cur.execute(f\"SELECT * FROM products WHERE id = '{id_}'\").fetchall()\n if len(check) > 0:\n return False, \" This Product Already Exist Try Updating \"\n else:\n cur.execute('INSERT INTO products VALUES(\"{}\",\"{}\",{},{})'.format(id_, name, quantity, cost))\n conn.commit()\n conn.close()\n return True, \" Product Added Successfully \"\n except:\n\n return False, \" Quantity and Cost are Integers \"\n\n except:\n\n return False, \" Failed Connecting Database \"\n\n\ndef get_product_detail(prod_id):\n if prod_id == '':\n return False, \" Enter Product Id \"\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(f\"SELECT rowid,* FROM products where id='{prod_id}'\").fetchall()\n conn.close()\n if len(data) == 0:\n return False, \" Product Don't Exist \"\n return True, data\n\n\ndef update_delete_product(rowid, id_, name, quantity, cost, qry):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, \" You Cannot Leave It Empty \"\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n try:\n quantity = int(quantity)\n cost = int(cost)\n if qry == 'update':\n cur.execute(\n f\"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}\")\n conn.commit()\n return True, \" Product Updated Successfully \"\n if qry == \"delete\":\n cur.execute(f\"DELETE FROM products WHERE rowid={rowid} \")\n conn.commit()\n return True, \" Product Deleted Successfully \"\n conn.commit()\n conn.close()\n\n except:\n\n return False, \" Quantity and Cost are Integers \"\n except:\n return False, \" Failed Connecting Database \"\n\n\ndef showProducts_all():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(\"SELECT * FROM products\").fetchall()\n return True, data\n\n\ndef added_to_cart(prod_id, qry):\n if prod_id == '':\n return False, \" Please Enter Product Id \",1\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n if qry == \"add\":\n try:\n cur.execute(\"\"\"CREATE TABLE cart(\n id TEXT,\n name TEXT,\n quantity INTEGER,\n cost INTEGER) \"\"\")\n except:\n pass\n\n data = cur.execute(f\"\"\"SELECT * FROM products WHERE id = '{prod_id}'\"\"\").fetchall()\n cart_check = cur.execute(f\"\"\"SELECT * FROM cart WHERE id = '{prod_id}' \"\"\").fetchall()\n if len(cart_check) == 0:\n cur.execute(f\"\"\"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})\"\"\")\n conn.commit()\n cur.execute(f\"\"\"UPDATE products SET quantity = {(data[0][2] - 1)} WHERE id 
='{prod_id}'\"\"\")\n conn.commit()\n all_prods = cur.execute(\"SELECT * FROM cart\").fetchall()\n return True, \" Product Added To Cart Successfully \",all_prods\n\n elif len(cart_check) > 0:\n cur.execute(\n f\"\"\"UPDATE cart SET quantity = {(cart_check[0][2] + 1)},cost={(cart_check[0][3] + data[0][3])} WHERE id ='{prod_id}'\"\"\")\n conn.commit()\n cur.execute(f\"\"\"UPDATE products SET quantity = {(data[0][2] - 1)} WHERE id ='{prod_id}'\"\"\")\n conn.commit()\n all_prods = cur.execute(\"SELECT * FROM cart\").fetchall()\n return True, \" Product Added To Cart Successfully \",all_prods\n\n\n if qry == \"remove\":\n\n cart_check = cur.execute(f\"\"\"SELECT * FROM cart WHERE id = '{prod_id}' \"\"\").fetchall()\n if len(cart_check) == 0:\n all_prods = cur.execute(\"SELECT * FROM cart\").fetchall()\n return True,\" Product Doesn't Exist \",all_prods\n elif len(cart_check) > 0:\n data = cur.execute(f\"\"\"SELECT * FROM products WHERE id = '{prod_id}'\"\"\").fetchall()\n cur.execute(f\"UPDATE products SET quantity = {(data[0][2]+cart_check[0][2])} WHERE id ='{prod_id}'\")\n conn.commit()\n cur.execute(f\"DELETE FROM cart WHERE id = '{prod_id}'\")\n conn.commit()\n all_prods = cur.execute(\"SELECT * FROM cart\").fetchall()\n return True,\" Product Deleted Successfully \",all_prods\n\n conn.close()\n\n\ndef get_cost():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(\"SELECT * FROM cart\").fetchall()\n cost = 0\n for i in data:\n cost = cost+i[3]\n return cost\n\n\ndef done_Drp():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n cur.execute(\"DROP TABLE cart\")\n conn.commit()\n\n",
"step-ids": [
3,
6,
7,
9,
10
]
}
|
[
3,
6,
7,
9,
10
] |
import bisect
import sys
input = sys.stdin.readline
N = int(input())
A = [int(input()) for _ in range(N)]
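# Patience-sorting style DP: scanning A back to front and inserting with
# bisect_right keeps dp sorted non-decreasingly; the number of filled slots
# ends up equal to the length of the longest non-increasing subsequence of A.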
dp = [float('inf')] * (N + 1)
for a in A[::-1]:
idx = bisect.bisect_right(dp, a)
dp[idx] = a
ans = 0
for n in dp:
if n != float('inf'):
ans += 1
print(ans)
|
normal
|
{
"blob_id": "dfe79d2f4bf4abc1d04035cf4556237a53c01122",
"index": 6913,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor a in A[::-1]:\n idx = bisect.bisect_right(dp, a)\n dp[idx] = a\n<mask token>\nfor n in dp:\n if n != float('inf'):\n ans += 1\nprint(ans)\n",
"step-3": "<mask token>\ninput = sys.stdin.readline\nN = int(input())\nA = [int(input()) for _ in range(N)]\ndp = [float('inf')] * (N + 1)\nfor a in A[::-1]:\n idx = bisect.bisect_right(dp, a)\n dp[idx] = a\nans = 0\nfor n in dp:\n if n != float('inf'):\n ans += 1\nprint(ans)\n",
"step-4": "import bisect\nimport sys\ninput = sys.stdin.readline\nN = int(input())\nA = [int(input()) for _ in range(N)]\ndp = [float('inf')] * (N + 1)\nfor a in A[::-1]:\n idx = bisect.bisect_right(dp, a)\n dp[idx] = a\nans = 0\nfor n in dp:\n if n != float('inf'):\n ans += 1\nprint(ans)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -------------------------------------------
# MODULES
# -------------------------------------------
import sys
import platform
if(platform.system() == "Windows"):
dir_sep = "\\"
else:
dir_sep = "/"
import time
import os
import numpy as np
import subprocess
import math
from mathutils import Vector
try:
from CifFile import CifFile
pars_check = False
except:
print("PyCIFRW not installed, try: pip install PyCifRW")
pars_check = True
try:
    import bpy
    Blender_env = True
except:
    print("Not in blender environment.")
    Blender_env = False # without this flag, drawCrystal() would raise a NameError outside Blender
# -------------------------------------------
# VARIABLES
# -------------------------------------------
# global variables
file_path = "Select a file" # path to CIF-file
draw_bonds = False # draws bonds between atoms
draw_style = "SPACE FILLING" # sets draw style
draw_quality = "MED" # sets key for qualitydic
draw_lattice = False # draws unit cell outline
atom_name = False # displays names of atoms
bond_distance = 2 # set the max distance between bound atoms
lattice_size = 0.03 # sets size of lattice borders
bond_radius = 0.05 # radius of bond
add_camera = True           # place a camera and light so the scene can be rendered
atom_color = True # draw atoms in color
user_feedback = "" # feedback for the user
print_data = True
# dictionaries
# per-style scale factors [atomic-radius factor, bond-radius factor], applied in Atom.drawObj
styledic = {
"SPACE FILLING" : [1,0],
"BALL AND STICK" : [0.5,0],
"STICK" : [0,1]
}
# sets detail of spheres
qualitydic = {
"MIN" : 8,
"LOW" : 16,
"MED" : 32,
"HIGH" : 64,
"MAX" : 128
}
'''
Uncomment this when no external dictionaries are found
# dictionary which couples atoms to a color
colordic = {
"O" : [1,0,0],
"Si" : [0.25,0.25,1],
"Fe" : [1,0.2,0.2],
}
# dictionary which couples atoms to a specific size
sizedic = {
"O" : 0.3,
"Si" : 0.6,
"Fe" : 1.4,
}
'''
# Read in dictionaries from external files
path = os.path.dirname(os.path.realpath(__file__))
# dictionary which couples atoms to a color
# Color scheme, in RGB percentages, following the CPK convention was extracted from https://en.wikipedia.org/wiki/CPK_coloring#Typical_assignments
# data can be changed by modifying the values in colordic.txt
with open(path+dir_sep+'colordic.txt','r') as inf:
colordic = eval(inf.read())
# dictionary which couples atoms to a specific size
# Atom data, in Ångström, was extracted from https://en.wikipedia.org/wiki/Atomic_radii_of_the_elements_(data_page)
# data can be changed by modifying the values in sizedic.txt
with open(path+dir_sep+'sizedic.txt','r') as inf:
sizedic = eval(inf.read())
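# NOTE: eval() here simply parses plain dict literals out of the two bundled
# text files; they are assumed to be trusted, locally shipped assets.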
# ----------------------------------------------
# BLENDER ADD-ON
# ----------------------------------------------
# add-on info
bl_info = {
"name": "Crystallographic Drawing Tool for Blender",
"description": "Add-on for drawing crystals from CIF-files.",
"author": "Jarrit Boons",
"blender": (2, 80,0),
"location": "View3D",
"category": "Crystallography in Blender"
}
# Operator to open the file browser and select a file
class ScanFileOperator(bpy.types.Operator):
bl_idname = "error.scan_file"
bl_label = "Scan file for return"
filepath = bpy.props.StringProperty(subtype="FILE_PATH")
def execute(self, context):
global file_path
global user_feedback
user_feedback = ""
file_path = self.filepath
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
def register():
bpy.types.Scene.path_to_file = bpy.props.StringProperty(
name="",
description="Path to CIF file",
default = "empty"
)
# Operator to hold CDTB-data and program execution
class Operator(bpy.types.Operator):
bl_idname = "object.cdtb_operator"
bl_label = "CDTB_operator"
bl_descriptor = "Operator for drawing crystal"
# Runs the whole program
def execute(self, context):
global pars_check
global user_feedback
if(pars_check):
user_feedback = "CiFFile module not installed"
return {'FINISHED'}
if(file_path == "Select a file"):
print("No file selected")
user_feedback = "No File selected"
else:
user_feedback = "Crystal drawn"
global draw_bonds
draw_bonds = context.scene.draw_bonds
global bond_distance
bond_distance = context.scene.bond_distance
global draw_lattice
draw_lattice = context.scene.draw_lattice
global atom_name
atom_name = context.scene.atom_name
global print_data
print_data = context.scene.print_data
global draw_style
global atom_color
draw_style = context.scene.style_selection_mode
if(draw_style=="STICK"):
draw_bonds = True
atom_color = False
else:
atom_color = True
global draw_quality
draw_quality = context.scene.quality_selection_mode
global add_camera
add_camera = context.scene.add_camera
drawCrystal(file_path)
return {'FINISHED'}
@classmethod
def register(cls):
print("Registered class: %s " % cls.bl_label)
bpy.types.Scene.draw_bonds = bpy.props.BoolProperty(
name="Draw bonds",
description="Draw bonds between elements"
)
bpy.types.Scene.bond_distance = bpy.props.FloatProperty(
name="Bond distance",
description="Set max distance for bonds to occur",
default=2,
min=0.0,
max=10.0,
precision=2
)
bpy.types.Scene.atom_name = bpy.props.BoolProperty(
name="Atom names",
description="Display the name of atoms"
)
bpy.types.Scene.draw_lattice = bpy.props.BoolProperty(
name="Draw lattice",
description="Draw unit cell outline"
)
bpy.types.Scene.print_data = bpy.props.BoolProperty(
name="Print data",
description="Print crystal data in terminal"
)
# Dropdown menu for drawing style
selection_style = [
("SPACE FILLING", "SPACE FILLING", "", 1),
("BALL AND STICK", "BALL AND STICK", "", 2),
("STICK", "STICK", "", 3),
]
bpy.types.Scene.style_selection_mode = bpy.props.EnumProperty(
items=selection_style,
name="Style"
)
# Dropdown menu for drawing quality
selection_qual = [
("MIN", "MIN", "", 1),
("LOW", "LOW", "", 2),
("MED", "MED", "", 3),
("HIGH", "HIGH", "", 4),
("MAX", "MAX", "", 5)
]
bpy.types.Scene.quality_selection_mode = bpy.props.EnumProperty(
items=selection_qual,
name="Quality",
default="MED"
)
bpy.types.Scene.add_camera = bpy.props.BoolProperty(
name="Place camera",
description="Place a camera and light to make rendering possible"
)
@classmethod
def unregister(cls):
print("Unregistered class: %s " % cls.bl_label)
# Panel to display add-on in Blender environment
class Panel(bpy.types.Panel):
bl_idname = "CDTB_Panel"
bl_label = "CDTB_Panel"
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
bl_context = "objectmode"
bl_category = "CDTB"
def draw(self,context):
scn = context.scene
layout = self.layout
layout.label(text = 'Input file',icon_value=112)
'''
for i in range(100):
layout.label(text = str(i),icon_value =i)
'''
box = layout.box()
row = box.row()
splitrow = row.split(factor=0.075)
left_col = splitrow.column()
right_col = splitrow.column()
left_col.operator('error.scan_file',icon_value=108,text="")
        right_col.label(text=file_path.rsplit(dir_sep, 1)[-1])
layout.label(text = 'Settings',icon_value =117)
box = layout.box()
box.prop(scn,'draw_bonds')
box.prop(scn,'bond_distance')
box.prop(scn,'draw_lattice')
box.prop(scn, 'atom_name')
box.prop(scn,'print_data')
box.prop(scn, 'style_selection_mode')
box.prop(scn, 'quality_selection_mode')
box.prop(scn, 'add_camera')
layout.separator()
splitrow = layout.split(factor=0.3)
col = splitrow.column()
col.operator('object.cdtb_operator',text="Draw Crystal")
col = splitrow.column()
col.label(text=user_feedback)
layout.separator()
@classmethod
def register(cls):
print("Registered class: %s " % cls.bl_label)
@classmethod
def unregister(cls):
print("Unregistered class: %s " % cls.bl_label)
def register():
bpy.utils.register_class(Operator)
bpy.utils.register_class(ScanFileOperator)
bpy.utils.register_class(Panel)
def unregister():
bpy.utils.unregister_class(Operator)
bpy.utils.unregister_class(Panel)
bpy.utils.unregister_class(ScanFileOperator)
#----------------------------------------------
# MAIN PROGRAM
#----------------------------------------------
class Crysdata():
def __init__(self,F,cb):
self.start = time.time()
print("Draw timer started")
self.name = F
self.cell = Cell(cb)
self.atoms = readEl(cb)
self.pos = readPos(cb)
c = self.cell
self.ftoc = self.get_fractional_to_cartesian_matrix(c.alen,c.blen,c.clen,c.alpha,c.beta,c.gamma)
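        # ftoc maps fractional CIF coordinates to cartesian ones; every draw
        # routine below converts positions through this matrix via toCarth()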
def printout(self):
print(self.name)
print()
self.cell.printout()
print()
for element in self.pos:
element.printout()
print()
for element in self.atoms:
element.printout()
print()
print("Fractional to cartesian matrix:")
print(self.ftoc)
def get_fractional_to_cartesian_matrix(self,a, b, c, alpha, beta, gamma):
"""
Original code found at: https://gist.github.com/Bismarrck/a68da01f19b39320f78a
!changed formula to resemble one found on: https://en.wikipedia.org/wiki/Fractional_coordinates
Return the transformation matrix that converts fractional coordinates to
cartesian coordinates.
Parameters
----------
a, b, c : float
The lengths of the edges.
        alpha, gamma, beta : float
            The angles between the sides, in degrees; they are converted
            to radians internally.
Returns
-------
r : array_like
The 3x3 rotation matrix. ``V_cart = np.dot(r, V_frac)``.
"""
alpha = np.deg2rad(alpha)
beta = np.deg2rad(beta)
gamma = np.deg2rad(gamma)
cosa = np.cos(alpha)
sina = np.sin(alpha)
cosb = np.cos(beta)
sinb = np.sin(beta)
cosg = np.cos(gamma)
sing = np.sin(gamma)
volume = 1.0 - cosa**2.0 - cosb**2.0 - cosg**2.0 + 2.0 * cosa * cosb * cosg
volume = a*b*c*np.sqrt(volume)
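        # standard triclinic unit-cell volume; only the r[2, 2] term below uses it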
r = np.zeros((3, 3))
r[0, 0] = float(a)
r[0, 1] = float(b * cosg)
r[0, 2] = float(c * cosb)
r[1, 0] = float(0)
r[1, 1] = float(b * sing)
r[1, 2] = float(c * (cosa - cosb * cosg) / sing)
r[2, 0] = float(0)
r[2, 1] = float(0)
r[2, 2] = float(volume / (a*b*sing))
return r
def drawCrystal(self):
if draw_lattice:
self.drawCell()
print("Lattice drawn after {:.3f} seconds".format((time.time()-self.start)))
self.drawAtoms()
print("Atoms drawn after {:.3f} seconds".format((time.time()-self.start)))
if(draw_bonds):
self.drawBonds()
print("Bonds drawn after {:.3f} seconds".format((time.time()-self.start)))
def drawAtoms(self):
for a in self.atoms:
a.drawObj(self.ftoc)
print("Atoms drawn:",len(self.atoms))
def drawCell(self):
cell_corners=[]
cell_edges=[]
# calculate and draw corners
for i in range(2):
for j in range(2):
for k in range(2):
bpy.ops.mesh.primitive_uv_sphere_add(size=lattice_size,location=toCarth(self.ftoc,[i,j,k]))
activeObject = bpy.context.active_object # Set active object to variable
cell_corners.append(activeObject)
mat = bpy.data.materials.new(name="MaterialName") # set new material to variable
activeObject.data.materials.append(mat) # add the material to the object
bpy.context.object.active_material.diffuse_color = [0,0,0] # change color
# draw lines
for i,j in zip([0,0,0,1,1,2,2,3,4,4,5,6],[1,2,4,3,5,3,6,7,5,6,7,7]):
cell_edges.append(self.drawLine(cell_corners[i].location,cell_corners[j].location))
# select all line and corners
for i in cell_corners:
i.select_set(action="SELECT")
for i in cell_edges:
i.select_set(action="SELECT")
# set corner in origin as active and join meshes as one object
bpy.context.view_layer.objects.active = cell_corners[0]
bpy.ops.object.join()
print("Cell box drawn")
def drawLine(self,ac,tc):
dx = tc[0] - ac[0]
dy = tc[1] - ac[1]
dz = tc[2] - ac[2]
dist = np.sqrt(dx**2 + dy**2 + dz**2)
bpy.ops.mesh.primitive_cylinder_add(vertices=qualitydic[draw_quality],radius=lattice_size,depth = dist,location = (dx/2 + ac[0], dy/2 + ac[1], dz/2 + ac[2]))
activeObject = bpy.context.active_object
mat = bpy.data.materials.new(name="MaterialName") # set new material to variable
activeObject.data.materials.append(mat) # add the material to the object
bpy.context.object.active_material.diffuse_color = [0,0,0] # change color
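        # spherical angles of the segment direction: theta tilts the default
        # +Z-aligned cylinder away from Z, phi spins it around Z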
phi = math.atan2(dy, dx)
theta = math.acos(dz/dist)
bpy.context.object.rotation_euler[1] = theta
bpy.context.object.rotation_euler[2] = phi
return activeObject
def drawBonds(self):
cnt = 0
bpy.ops.curve.primitive_bezier_circle_add(location=(0,0,0),radius = bond_radius)
bpy.context.object.name = 'bez'
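        # 'bez' is the shared bevel profile reused by every bond curve in
        # makeBond(), so all bonds render with the same radius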
for atom in self.atoms:
for target in self.atoms:
if atom != target:
if("bond{}-{}".format(target.elid,atom.elid)in bpy.data.objects):
continue
if(atom.sym == 'H' and target.sym == 'H'):
continue
if calcDistance(self.ftoc,atom,target) <= bond_distance:
self.makeBond(atom,target)
cnt += 1
print("Atom bonds drawn:",cnt)
# This function hooks the bond to the atoms
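    # Hook modifiers keep the curve's endpoints attached to the atom objects,
    # so a bond follows its atoms if they are moved afterwards.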
def makeBond(self,atom,target):
if 'OBJECT'!=bpy.context.mode:
bpy.ops.object.mode_set(mode='OBJECT')
o1 = bpy.data.objects[atom.elid]
o2 = bpy.data.objects[target.elid]
bond = self.hookCurve(o1,o2, bpy.context.scene)
bpy.context.object.data.bevel_object = bpy.data.objects["bez"]
bpy.context.object.name = "bond{}-{}".format(atom.elid,target.elid)
activeObject = bpy.context.active_object # Set active object to variable
mat = bpy.data.materials.new(name="MaterialName") # set new material to variable
activeObject.data.materials.append(mat) # add the material to the object
bpy.context.object.active_material.diffuse_color = [255,255,255] # change color
if 'OBJECT'!=bpy.context.mode:
bpy.ops.object.mode_set(mode='OBJECT')
def hookCurve(self,o1, o2, scn):
curve = bpy.data.curves.new("link", 'CURVE')
curve.dimensions = '3D'
spline = curve.splines.new('BEZIER')
spline.bezier_points.add(1)
p0 = spline.bezier_points[0]
p1 = spline.bezier_points[1]
# p0.co = o1.location
p0.handle_right_type = 'VECTOR'
# p1.co = o2.location
p1.handle_left_type = 'VECTOR'
obj = bpy.data.objects.new("link", curve)
m0 = obj.modifiers.new("alpha", 'HOOK')
m0.object = o1
m1 = obj.modifiers.new("beta", 'HOOK')
m1.object = o2
bpy.context.collection.objects.link(obj)
bpy.context.view_layer.objects.active = obj
bpy.ops.object.mode_set(mode='EDIT')
# Reassign the points
p0 = curve.splines[0].bezier_points[0]
p1 = curve.splines[0].bezier_points[1]
# Hook first control point to first atom
p0.select_control_point = True
p1.select_control_point = False
bpy.ops.object.hook_assign(modifier="alpha")
# Hook second control point to first atom
p0 = curve.splines[0].bezier_points[0]
p1 = curve.splines[0].bezier_points[1]
p1.select_control_point = True
p0.select_control_point = False
bpy.ops.object.hook_assign(modifier="beta")
return obj
class Cell():
def __init__(self,cb):
self.alen = float(cb["_cell_length_a"])
self.blen = float(cb["_cell_length_b"])
self.clen = float(cb["_cell_length_c"])
self.alpha = float(cb["_cell_angle_alpha"])
self.beta = float(cb["_cell_angle_beta"])
self.gamma = float(cb["_cell_angle_gamma"])
def printout(self):
print("alen:{:8} \nblen:{:8} \nclen:{:8} \nalpha:{:8} \nbeta: {:8} \ngamma:{:8}".format(self.alen,self.blen,self.clen,self.alpha,self.beta,self.gamma))
class Atom():
def __init__(self,elid,sym,xpos,ypos,zpos):
self.elid = elid
self.sym = sym
self.xpos = float(xpos)
self.ypos = float(ypos)
self.zpos = float(zpos)
def printout(self):
print("id:{:3} symbol:{:2} x:{:.4f} y:{:.4f} z:{:.4f}".format(self.elid,self.sym,self.xpos,self.ypos,self.zpos))
def drawObj(self,ftoc):
size = sizedic[self.sym]*styledic[draw_style][0]+bond_radius*styledic[draw_style][1]
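        # styledic factors: SPACE FILLING -> full atomic radius,
        # BALL AND STICK -> half radius, STICK -> uniform bond-radius spheres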
bpy.ops.mesh.primitive_uv_sphere_add(segments=qualitydic[draw_quality],ring_count=qualitydic[draw_quality]/2,size=size,location=toCarth(ftoc,[self.xpos,self.ypos,self.zpos]))
bpy.context.object.name = self.elid
activeObject = bpy.context.active_object # Set active object to variable
mat = bpy.data.materials.new(name="MaterialName") # set new material to variable
activeObject.data.materials.append(mat) # add the material to the object
if(atom_name):
bpy.context.object.show_name = True
if(atom_color):
bpy.context.object.active_material.diffuse_color = colordic[self.sym] # change color to dictionary color
else:
bpy.context.object.active_material.diffuse_color = [1,1,1] # change color to white
class sympos():
def __init__(self,string):
self.xsym = (string[0].split(','))[0]
self.ysym = (string[0].split(','))[1]
self.zsym = (string[0].split(','))[2]
def printout(self):
print("x:{:8} y:{:8} z:{:8}".format(self.xsym,self.ysym,self.zsym))
def readEl(cb):
elements = []
previd = []
idcnt = []
lb = cb.GetLoop("_atom_site_label")
for el in lb:
flag = False
for i in range(len(previd)):
if(el[0] == previd[i]):
flag = True
break
if(flag):
idcnt[i] += 1
else:
previd.append(el[0])
idcnt.append(0)
i = len(idcnt)-1
id_t = "{}.{}".format(el[0],idcnt[i])
elements.append(Atom(id_t,el[1],el[2],el[3],el[4]))
return elements
def readPos(cb):
    positions = []
lb = cb.GetLoop("_symmetry_equiv_pos_as_xyz")
for el in lb:
positions.append(sympos(el))
return positions
def obabel_fill_unit_cell(cif_file, p1_file):
# Convert symmetry to P1 using openbabel as subprocess
# Notation: obabel [-i<input-type>] <infilename> [-o<output-type>] -O<outfilename> [Options]
subprocess.run(['obabel', '-icif', cif_file, '-ocif', '-O', p1_file, '--fillUC', 'keepconnect'])
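    # assumes the obabel binary is on the system PATH; drawCrystal() catches
    # the failure and reports a missing OpenBabel installation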
def calcDistance(ftoc,atom1,atom2):
ac = toCarth(ftoc,[atom1.xpos,atom1.ypos,atom1.zpos])
tc = toCarth(ftoc,[atom2.xpos,atom2.ypos,atom2.zpos])
dx = tc[0] - ac[0]
dy = tc[1] - ac[1]
dz = tc[2] - ac[2]
dist = np.sqrt(dx**2 + dy**2 + dz**2)
return dist
def toCarth(ftoc,V_frac):
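    # plain linear map; e.g. the fractional centre [0.5, 0.5, 0.5] lands at
    # the cartesian centre of the unit cell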
return np.dot(ftoc, V_frac)
def look_at(obj_camera, point):
loc_camera = obj_camera.matrix_world.to_translation()
direction = point - loc_camera
# point the cameras '-Z' and use its 'Y' as up
rot_quat = direction.to_track_quat('-Z', 'Y')
# assume we're using euler rotation
obj_camera.rotation_euler = rot_quat.to_euler()
def addCamera(x,y,z):
bpy.ops.object.camera_add(view_align=True, enter_editmode=False, location=(5*x,5*y,5*z))
print("camera added")
bpy.ops.object.light_add(type='SUN', view_align=False, location=(0, 0, 0))
obj_camera = bpy.data.objects["Camera"]
look_at(obj_camera, Vector([0,0,z/4]))
obj_camera.data.type = 'ORTHO'
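    # rough framing heuristic: an orthographic scale of about a+b+c keeps the
    # whole cell in view regardless of its absolute size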
obj_camera.data.ortho_scale = ((x+y+z))
def clearWS():
if 'OBJECT'!=bpy.context.mode:
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.select_all(action='SELECT')
bpy.ops.object.delete(use_global=False)
# remove all previous curves
for i in bpy.data.curves:
bpy.data.curves.remove(i)
# remove all previous materials
for m in bpy.data.materials:
bpy.data.materials.remove(m)
# remove all previous camera's
for c in bpy.data.cameras:
bpy.data.cameras.remove(c)
print("Workspace cleared.")
return
def drawCrystal(file):
# Check if file is file:
S = time.time()
global user_feedback
ext = file[len(file)-4:]
if(ext.lower() != ".cif"):
print("Only cif files can be visualised")
user_feedback = "Not a cif file"
return
# Check OpenBabel installation
try:
# Convert the cif file to its P1 symmetry notation as a temporary cif file
print('Converting %s to P1' %file)
obabel_fill_unit_cell(file, "temp.CIF")
cf = CifFile("temp.CIF")
except:
print("No OpenBabel installation found, install it from http://openbabel.org/wiki/Category:Installation")
user_feedback = "OpenBabel not installed"
#cf = CifFile(file) CifFile apparently can't read in long filepaths
return
# Open and parse our cif
f = file.rsplit(dir_sep, 1)[-1]
F = f[:3]
print(f)
cb = cf.first_block()
Crystal = Crysdata(F,cb)
# Print crystal data in terminal if checked
if(print_data):
Crystal.printout()
print("Crystal data read after "+ str(time.time() - S) + " seconds")
# Draw crystal if in Blender environment
if(Blender_env):
clearWS()
Crystal.drawCrystal()
bpy.ops.object.select_all(action='DESELECT')
if(add_camera):
addCamera(Crystal.cell.alen,Crystal.cell.blen,Crystal.cell.clen)
|
normal
|
{
"blob_id": "e14319e705a3c1cdf85e0a2fe77c211e2afa9baa",
"index": 9880,
"step-1": "<mask token>\n\n\nclass Crysdata:\n\n def __init__(self, F, cb):\n self.start = time.time()\n print('Draw timer started')\n self.name = F\n self.cell = Cell(cb)\n self.atoms = readEl(cb)\n self.pos = readPos(cb)\n c = self.cell\n self.ftoc = self.get_fractional_to_cartesian_matrix(c.alen, c.blen,\n c.clen, c.alpha, c.beta, c.gamma)\n\n def printout(self):\n print(self.name)\n print()\n self.cell.printout()\n print()\n for element in self.pos:\n element.printout()\n print()\n for element in self.atoms:\n element.printout()\n print()\n print('Fractional to cartesian matrix:')\n print(self.ftoc)\n\n def get_fractional_to_cartesian_matrix(self, a, b, c, alpha, beta, gamma):\n \"\"\"\n Original code found at: https://gist.github.com/Bismarrck/a68da01f19b39320f78a\n\n !changed formula to resemble one found on: https://en.wikipedia.org/wiki/Fractional_coordinates\n\n Return the transformation matrix that converts fractional coordinates to\n cartesian coordinates.\n Parameters\n ----------\n a, b, c : float\n The lengths of the edges.\n alpha, gamma, beta : float\n The angles between the sides.\n angle_in_degrees : bool\n True if alpha, beta and gamma are expressed in degrees.\n Returns\n -------\n r : array_like\n The 3x3 rotation matrix. ``V_cart = np.dot(r, V_frac)``.\n \"\"\"\n alpha = np.deg2rad(alpha)\n beta = np.deg2rad(beta)\n gamma = np.deg2rad(gamma)\n cosa = np.cos(alpha)\n sina = np.sin(alpha)\n cosb = np.cos(beta)\n sinb = np.sin(beta)\n cosg = np.cos(gamma)\n sing = np.sin(gamma)\n volume = (1.0 - cosa ** 2.0 - cosb ** 2.0 - cosg ** 2.0 + 2.0 *\n cosa * cosb * cosg)\n volume = a * b * c * np.sqrt(volume)\n r = np.zeros((3, 3))\n r[0, 0] = float(a)\n r[0, 1] = float(b * cosg)\n r[0, 2] = float(c * cosb)\n r[1, 0] = float(0)\n r[1, 1] = float(b * sing)\n r[1, 2] = float(c * (cosa - cosb * cosg) / sing)\n r[2, 0] = float(0)\n r[2, 1] = float(0)\n r[2, 2] = float(volume / (a * b * sing))\n return r\n\n def drawCrystal(self):\n if draw_lattice:\n self.drawCell()\n print('Lattice drawn after {:.3f} seconds'.format(time.time() -\n self.start))\n self.drawAtoms()\n print('Atoms drawn after {:.3f} seconds'.format(time.time() - self.\n start))\n if draw_bonds:\n self.drawBonds()\n print('Bonds drawn after {:.3f} seconds'.format(time.time() -\n self.start))\n\n def drawAtoms(self):\n for a in self.atoms:\n a.drawObj(self.ftoc)\n print('Atoms drawn:', len(self.atoms))\n\n def drawCell(self):\n cell_corners = []\n cell_edges = []\n for i in range(2):\n for j in range(2):\n for k in range(2):\n bpy.ops.mesh.primitive_uv_sphere_add(size=lattice_size,\n location=toCarth(self.ftoc, [i, j, k]))\n activeObject = bpy.context.active_object\n cell_corners.append(activeObject)\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [0, 0, 0\n ]\n for i, j in zip([0, 0, 0, 1, 1, 2, 2, 3, 4, 4, 5, 6], [1, 2, 4, 3, \n 5, 3, 6, 7, 5, 6, 7, 7]):\n cell_edges.append(self.drawLine(cell_corners[i].location,\n cell_corners[j].location))\n for i in cell_corners:\n i.select_set(action='SELECT')\n for i in cell_edges:\n i.select_set(action='SELECT')\n bpy.context.view_layer.objects.active = cell_corners[0]\n bpy.ops.object.join()\n print('Cell box drawn')\n\n def drawLine(self, ac, tc):\n dx = tc[0] - ac[0]\n dy = tc[1] - ac[1]\n dz = tc[2] - ac[2]\n dist = np.sqrt(dx ** 2 + dy ** 2 + dz ** 2)\n bpy.ops.mesh.primitive_cylinder_add(vertices=qualitydic[\n draw_quality], radius=lattice_size, depth=dist, location=(dx / \n 
2 + ac[0], dy / 2 + ac[1], dz / 2 + ac[2]))\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [0, 0, 0]\n phi = math.atan2(dy, dx)\n theta = math.acos(dz / dist)\n bpy.context.object.rotation_euler[1] = theta\n bpy.context.object.rotation_euler[2] = phi\n return activeObject\n\n def drawBonds(self):\n cnt = 0\n bpy.ops.curve.primitive_bezier_circle_add(location=(0, 0, 0),\n radius=bond_radius)\n bpy.context.object.name = 'bez'\n for atom in self.atoms:\n for target in self.atoms:\n if atom != target:\n if 'bond{}-{}'.format(target.elid, atom.elid\n ) in bpy.data.objects:\n continue\n if atom.sym == 'H' and target.sym == 'H':\n continue\n if calcDistance(self.ftoc, atom, target) <= bond_distance:\n self.makeBond(atom, target)\n cnt += 1\n print('Atom bonds drawn:', cnt)\n\n def makeBond(self, atom, target):\n if 'OBJECT' != bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n o1 = bpy.data.objects[atom.elid]\n o2 = bpy.data.objects[target.elid]\n bond = self.hookCurve(o1, o2, bpy.context.scene)\n bpy.context.object.data.bevel_object = bpy.data.objects['bez']\n bpy.context.object.name = 'bond{}-{}'.format(atom.elid, target.elid)\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [255, 255, 255]\n if 'OBJECT' != bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n <mask token>\n\n\nclass Cell:\n\n def __init__(self, cb):\n self.alen = float(cb['_cell_length_a'])\n self.blen = float(cb['_cell_length_b'])\n self.clen = float(cb['_cell_length_c'])\n self.alpha = float(cb['_cell_angle_alpha'])\n self.beta = float(cb['_cell_angle_beta'])\n self.gamma = float(cb['_cell_angle_gamma'])\n\n def printout(self):\n print(\n 'alen:{:8} \\nblen:{:8} \\nclen:{:8} \\nalpha:{:8} \\nbeta: {:8} \\ngamma:{:8}'\n .format(self.alen, self.blen, self.clen, self.alpha, self.beta,\n self.gamma))\n\n\nclass Atom:\n\n def __init__(self, elid, sym, xpos, ypos, zpos):\n self.elid = elid\n self.sym = sym\n self.xpos = float(xpos)\n self.ypos = float(ypos)\n self.zpos = float(zpos)\n\n def printout(self):\n print('id:{:3} symbol:{:2} x:{:.4f} y:{:.4f} z:{:.4f}'.format(self.\n elid, self.sym, self.xpos, self.ypos, self.zpos))\n\n def drawObj(self, ftoc):\n size = sizedic[self.sym] * styledic[draw_style][0\n ] + bond_radius * styledic[draw_style][1]\n bpy.ops.mesh.primitive_uv_sphere_add(segments=qualitydic[\n draw_quality], ring_count=qualitydic[draw_quality] / 2, size=\n size, location=toCarth(ftoc, [self.xpos, self.ypos, self.zpos]))\n bpy.context.object.name = self.elid\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n if atom_name:\n bpy.context.object.show_name = True\n if atom_color:\n bpy.context.object.active_material.diffuse_color = colordic[self\n .sym]\n else:\n bpy.context.object.active_material.diffuse_color = [1, 1, 1]\n\n\nclass sympos:\n\n def __init__(self, string):\n self.xsym = string[0].split(',')[0]\n self.ysym = string[0].split(',')[1]\n self.zsym = string[0].split(',')[2]\n\n def printout(self):\n print('x:{:8} y:{:8} z:{:8}'.format(self.xsym, self.ysym, self.zsym))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ScanFileOperator(bpy.types.Operator):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Operator(bpy.types.Operator):\n bl_idname = 'object.cdtb_operator'\n bl_label = 'CDTB_operator'\n bl_descriptor = 'Operator for drawing crystal'\n\n def execute(self, context):\n global pars_check\n global user_feedback\n if pars_check:\n user_feedback = 'CiFFile module not installed'\n return {'FINISHED'}\n if file_path == 'Select a file':\n print('No file selected')\n user_feedback = 'No File selected'\n else:\n user_feedback = 'Crystal drawn'\n global draw_bonds\n draw_bonds = context.scene.draw_bonds\n global bond_distance\n bond_distance = context.scene.bond_distance\n global draw_lattice\n draw_lattice = context.scene.draw_lattice\n global atom_name\n atom_name = context.scene.atom_name\n global print_data\n print_data = context.scene.print_data\n global draw_style\n global atom_color\n draw_style = context.scene.style_selection_mode\n if draw_style == 'STICK':\n draw_bonds = True\n atom_color = False\n else:\n atom_color = True\n global draw_quality\n draw_quality = context.scene.quality_selection_mode\n global add_camera\n add_camera = context.scene.add_camera\n drawCrystal(file_path)\n return {'FINISHED'}\n\n @classmethod\n def register(cls):\n print('Registered class: %s ' % cls.bl_label)\n bpy.types.Scene.draw_bonds = bpy.props.BoolProperty(name=\n 'Draw bonds', description='Draw bonds between elements')\n bpy.types.Scene.bond_distance = bpy.props.FloatProperty(name=\n 'Bond distance', description=\n 'Set max distance for bonds to occur', default=2, min=0.0, max=\n 10.0, precision=2)\n bpy.types.Scene.atom_name = bpy.props.BoolProperty(name=\n 'Atom names', description='Display the name of atoms')\n bpy.types.Scene.draw_lattice = bpy.props.BoolProperty(name=\n 'Draw lattice', description='Draw unit cell outline')\n bpy.types.Scene.print_data = bpy.props.BoolProperty(name=\n 'Print data', description='Print crystal data in terminal')\n selection_style = [('SPACE FILLING', 'SPACE FILLING', '', 1), (\n 'BALL AND STICK', 'BALL AND STICK', '', 2), ('STICK', 'STICK',\n '', 3)]\n bpy.types.Scene.style_selection_mode = bpy.props.EnumProperty(items\n =selection_style, name='Style')\n selection_qual = [('MIN', 'MIN', '', 1), ('LOW', 'LOW', '', 2), (\n 'MED', 'MED', '', 3), ('HIGH', 'HIGH', '', 4), ('MAX', 'MAX',\n '', 5)]\n bpy.types.Scene.quality_selection_mode = bpy.props.EnumProperty(items\n =selection_qual, name='Quality', default='MED')\n bpy.types.Scene.add_camera = bpy.props.BoolProperty(name=\n 'Place camera', description=\n 'Place a camera and light to make rendering possible')\n\n @classmethod\n def unregister(cls):\n print('Unregistered class: %s ' % cls.bl_label)\n\n\nclass Panel(bpy.types.Panel):\n bl_idname = 'CDTB_Panel'\n bl_label = 'CDTB_Panel'\n bl_space_type = 'VIEW_3D'\n bl_region_type = 'TOOLS'\n bl_context = 'objectmode'\n bl_category = 'CDTB'\n\n def draw(self, context):\n scn = context.scene\n layout = self.layout\n layout.label(text='Input file', icon_value=112)\n \"\"\"\n for i in range(100):\n layout.label(text = str(i),icon_value =i)\n \"\"\"\n box = layout.box()\n row = box.row()\n splitrow = row.split(factor=0.075)\n left_col = splitrow.column()\n right_col = splitrow.column()\n left_col.operator('error.scan_file', icon_value=108, text='')\n right_col.label(text=file_path.rsplit('\\\\', 2)[-1])\n layout.label(text='Settings', icon_value=117)\n box = layout.box()\n box.prop(scn, 
'draw_bonds')\n box.prop(scn, 'bond_distance')\n box.prop(scn, 'draw_lattice')\n box.prop(scn, 'atom_name')\n box.prop(scn, 'print_data')\n box.prop(scn, 'style_selection_mode')\n box.prop(scn, 'quality_selection_mode')\n box.prop(scn, 'add_camera')\n layout.separator()\n splitrow = layout.split(factor=0.3)\n col = splitrow.column()\n col.operator('object.cdtb_operator', text='Draw Crystal')\n col = splitrow.column()\n col.label(text=user_feedback)\n layout.separator()\n\n @classmethod\n def register(cls):\n print('Registered class: %s ' % cls.bl_label)\n\n @classmethod\n def unregister(cls):\n print('Unregistered class: %s ' % cls.bl_label)\n\n\n<mask token>\n\n\nclass Crysdata:\n\n def __init__(self, F, cb):\n self.start = time.time()\n print('Draw timer started')\n self.name = F\n self.cell = Cell(cb)\n self.atoms = readEl(cb)\n self.pos = readPos(cb)\n c = self.cell\n self.ftoc = self.get_fractional_to_cartesian_matrix(c.alen, c.blen,\n c.clen, c.alpha, c.beta, c.gamma)\n\n def printout(self):\n print(self.name)\n print()\n self.cell.printout()\n print()\n for element in self.pos:\n element.printout()\n print()\n for element in self.atoms:\n element.printout()\n print()\n print('Fractional to cartesian matrix:')\n print(self.ftoc)\n\n def get_fractional_to_cartesian_matrix(self, a, b, c, alpha, beta, gamma):\n \"\"\"\n Original code found at: https://gist.github.com/Bismarrck/a68da01f19b39320f78a\n\n !changed formula to resemble one found on: https://en.wikipedia.org/wiki/Fractional_coordinates\n\n Return the transformation matrix that converts fractional coordinates to\n cartesian coordinates.\n Parameters\n ----------\n a, b, c : float\n The lengths of the edges.\n alpha, gamma, beta : float\n The angles between the sides.\n angle_in_degrees : bool\n True if alpha, beta and gamma are expressed in degrees.\n Returns\n -------\n r : array_like\n The 3x3 rotation matrix. 
``V_cart = np.dot(r, V_frac)``.\n \"\"\"\n alpha = np.deg2rad(alpha)\n beta = np.deg2rad(beta)\n gamma = np.deg2rad(gamma)\n cosa = np.cos(alpha)\n sina = np.sin(alpha)\n cosb = np.cos(beta)\n sinb = np.sin(beta)\n cosg = np.cos(gamma)\n sing = np.sin(gamma)\n volume = (1.0 - cosa ** 2.0 - cosb ** 2.0 - cosg ** 2.0 + 2.0 *\n cosa * cosb * cosg)\n volume = a * b * c * np.sqrt(volume)\n r = np.zeros((3, 3))\n r[0, 0] = float(a)\n r[0, 1] = float(b * cosg)\n r[0, 2] = float(c * cosb)\n r[1, 0] = float(0)\n r[1, 1] = float(b * sing)\n r[1, 2] = float(c * (cosa - cosb * cosg) / sing)\n r[2, 0] = float(0)\n r[2, 1] = float(0)\n r[2, 2] = float(volume / (a * b * sing))\n return r\n\n def drawCrystal(self):\n if draw_lattice:\n self.drawCell()\n print('Lattice drawn after {:.3f} seconds'.format(time.time() -\n self.start))\n self.drawAtoms()\n print('Atoms drawn after {:.3f} seconds'.format(time.time() - self.\n start))\n if draw_bonds:\n self.drawBonds()\n print('Bonds drawn after {:.3f} seconds'.format(time.time() -\n self.start))\n\n def drawAtoms(self):\n for a in self.atoms:\n a.drawObj(self.ftoc)\n print('Atoms drawn:', len(self.atoms))\n\n def drawCell(self):\n cell_corners = []\n cell_edges = []\n for i in range(2):\n for j in range(2):\n for k in range(2):\n bpy.ops.mesh.primitive_uv_sphere_add(size=lattice_size,\n location=toCarth(self.ftoc, [i, j, k]))\n activeObject = bpy.context.active_object\n cell_corners.append(activeObject)\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [0, 0, 0\n ]\n for i, j in zip([0, 0, 0, 1, 1, 2, 2, 3, 4, 4, 5, 6], [1, 2, 4, 3, \n 5, 3, 6, 7, 5, 6, 7, 7]):\n cell_edges.append(self.drawLine(cell_corners[i].location,\n cell_corners[j].location))\n for i in cell_corners:\n i.select_set(action='SELECT')\n for i in cell_edges:\n i.select_set(action='SELECT')\n bpy.context.view_layer.objects.active = cell_corners[0]\n bpy.ops.object.join()\n print('Cell box drawn')\n\n def drawLine(self, ac, tc):\n dx = tc[0] - ac[0]\n dy = tc[1] - ac[1]\n dz = tc[2] - ac[2]\n dist = np.sqrt(dx ** 2 + dy ** 2 + dz ** 2)\n bpy.ops.mesh.primitive_cylinder_add(vertices=qualitydic[\n draw_quality], radius=lattice_size, depth=dist, location=(dx / \n 2 + ac[0], dy / 2 + ac[1], dz / 2 + ac[2]))\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [0, 0, 0]\n phi = math.atan2(dy, dx)\n theta = math.acos(dz / dist)\n bpy.context.object.rotation_euler[1] = theta\n bpy.context.object.rotation_euler[2] = phi\n return activeObject\n\n def drawBonds(self):\n cnt = 0\n bpy.ops.curve.primitive_bezier_circle_add(location=(0, 0, 0),\n radius=bond_radius)\n bpy.context.object.name = 'bez'\n for atom in self.atoms:\n for target in self.atoms:\n if atom != target:\n if 'bond{}-{}'.format(target.elid, atom.elid\n ) in bpy.data.objects:\n continue\n if atom.sym == 'H' and target.sym == 'H':\n continue\n if calcDistance(self.ftoc, atom, target) <= bond_distance:\n self.makeBond(atom, target)\n cnt += 1\n print('Atom bonds drawn:', cnt)\n\n def makeBond(self, atom, target):\n if 'OBJECT' != bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n o1 = bpy.data.objects[atom.elid]\n o2 = bpy.data.objects[target.elid]\n bond = self.hookCurve(o1, o2, bpy.context.scene)\n bpy.context.object.data.bevel_object = bpy.data.objects['bez']\n bpy.context.object.name = 
'bond{}-{}'.format(atom.elid, target.elid)\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [255, 255, 255]\n if 'OBJECT' != bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n\n def hookCurve(self, o1, o2, scn):\n curve = bpy.data.curves.new('link', 'CURVE')\n curve.dimensions = '3D'\n spline = curve.splines.new('BEZIER')\n spline.bezier_points.add(1)\n p0 = spline.bezier_points[0]\n p1 = spline.bezier_points[1]\n p0.handle_right_type = 'VECTOR'\n p1.handle_left_type = 'VECTOR'\n obj = bpy.data.objects.new('link', curve)\n m0 = obj.modifiers.new('alpha', 'HOOK')\n m0.object = o1\n m1 = obj.modifiers.new('beta', 'HOOK')\n m1.object = o2\n bpy.context.collection.objects.link(obj)\n bpy.context.view_layer.objects.active = obj\n bpy.ops.object.mode_set(mode='EDIT')\n p0 = curve.splines[0].bezier_points[0]\n p1 = curve.splines[0].bezier_points[1]\n p0.select_control_point = True\n p1.select_control_point = False\n bpy.ops.object.hook_assign(modifier='alpha')\n p0 = curve.splines[0].bezier_points[0]\n p1 = curve.splines[0].bezier_points[1]\n p1.select_control_point = True\n p0.select_control_point = False\n bpy.ops.object.hook_assign(modifier='beta')\n return obj\n\n\nclass Cell:\n\n def __init__(self, cb):\n self.alen = float(cb['_cell_length_a'])\n self.blen = float(cb['_cell_length_b'])\n self.clen = float(cb['_cell_length_c'])\n self.alpha = float(cb['_cell_angle_alpha'])\n self.beta = float(cb['_cell_angle_beta'])\n self.gamma = float(cb['_cell_angle_gamma'])\n\n def printout(self):\n print(\n 'alen:{:8} \\nblen:{:8} \\nclen:{:8} \\nalpha:{:8} \\nbeta: {:8} \\ngamma:{:8}'\n .format(self.alen, self.blen, self.clen, self.alpha, self.beta,\n self.gamma))\n\n\nclass Atom:\n\n def __init__(self, elid, sym, xpos, ypos, zpos):\n self.elid = elid\n self.sym = sym\n self.xpos = float(xpos)\n self.ypos = float(ypos)\n self.zpos = float(zpos)\n\n def printout(self):\n print('id:{:3} symbol:{:2} x:{:.4f} y:{:.4f} z:{:.4f}'.format(self.\n elid, self.sym, self.xpos, self.ypos, self.zpos))\n\n def drawObj(self, ftoc):\n size = sizedic[self.sym] * styledic[draw_style][0\n ] + bond_radius * styledic[draw_style][1]\n bpy.ops.mesh.primitive_uv_sphere_add(segments=qualitydic[\n draw_quality], ring_count=qualitydic[draw_quality] / 2, size=\n size, location=toCarth(ftoc, [self.xpos, self.ypos, self.zpos]))\n bpy.context.object.name = self.elid\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n if atom_name:\n bpy.context.object.show_name = True\n if atom_color:\n bpy.context.object.active_material.diffuse_color = colordic[self\n .sym]\n else:\n bpy.context.object.active_material.diffuse_color = [1, 1, 1]\n\n\nclass sympos:\n\n def __init__(self, string):\n self.xsym = string[0].split(',')[0]\n self.ysym = string[0].split(',')[1]\n self.zsym = string[0].split(',')[2]\n\n def printout(self):\n print('x:{:8} y:{:8} z:{:8}'.format(self.xsym, self.ysym, self.zsym))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ScanFileOperator(bpy.types.Operator):\n bl_idname = 'error.scan_file'\n bl_label = 'Scan file for return'\n filepath = bpy.props.StringProperty(subtype='FILE_PATH')\n\n def execute(self, context):\n global file_path\n global user_feedback\n user_feedback = ''\n file_path = self.filepath\n return {'FINISHED'}\n\n def invoke(self, context, event):\n context.window_manager.fileselect_add(self)\n return {'RUNNING_MODAL'}\n\n def register():\n bpy.types.Scene.path_to_file = bpy.props.StringProperty(name='',\n description='Path to CIF file', default='empty')\n\n\nclass Operator(bpy.types.Operator):\n bl_idname = 'object.cdtb_operator'\n bl_label = 'CDTB_operator'\n bl_descriptor = 'Operator for drawing crystal'\n\n def execute(self, context):\n global pars_check\n global user_feedback\n if pars_check:\n user_feedback = 'CiFFile module not installed'\n return {'FINISHED'}\n if file_path == 'Select a file':\n print('No file selected')\n user_feedback = 'No File selected'\n else:\n user_feedback = 'Crystal drawn'\n global draw_bonds\n draw_bonds = context.scene.draw_bonds\n global bond_distance\n bond_distance = context.scene.bond_distance\n global draw_lattice\n draw_lattice = context.scene.draw_lattice\n global atom_name\n atom_name = context.scene.atom_name\n global print_data\n print_data = context.scene.print_data\n global draw_style\n global atom_color\n draw_style = context.scene.style_selection_mode\n if draw_style == 'STICK':\n draw_bonds = True\n atom_color = False\n else:\n atom_color = True\n global draw_quality\n draw_quality = context.scene.quality_selection_mode\n global add_camera\n add_camera = context.scene.add_camera\n drawCrystal(file_path)\n return {'FINISHED'}\n\n @classmethod\n def register(cls):\n print('Registered class: %s ' % cls.bl_label)\n bpy.types.Scene.draw_bonds = bpy.props.BoolProperty(name=\n 'Draw bonds', description='Draw bonds between elements')\n bpy.types.Scene.bond_distance = bpy.props.FloatProperty(name=\n 'Bond distance', description=\n 'Set max distance for bonds to occur', default=2, min=0.0, max=\n 10.0, precision=2)\n bpy.types.Scene.atom_name = bpy.props.BoolProperty(name=\n 'Atom names', description='Display the name of atoms')\n bpy.types.Scene.draw_lattice = bpy.props.BoolProperty(name=\n 'Draw lattice', description='Draw unit cell outline')\n bpy.types.Scene.print_data = bpy.props.BoolProperty(name=\n 'Print data', description='Print crystal data in terminal')\n selection_style = [('SPACE FILLING', 'SPACE FILLING', '', 1), (\n 'BALL AND STICK', 'BALL AND STICK', '', 2), ('STICK', 'STICK',\n '', 3)]\n bpy.types.Scene.style_selection_mode = bpy.props.EnumProperty(items\n =selection_style, name='Style')\n selection_qual = [('MIN', 'MIN', '', 1), ('LOW', 'LOW', '', 2), (\n 'MED', 'MED', '', 3), ('HIGH', 'HIGH', '', 4), ('MAX', 'MAX',\n '', 5)]\n bpy.types.Scene.quality_selection_mode = bpy.props.EnumProperty(items\n =selection_qual, name='Quality', default='MED')\n bpy.types.Scene.add_camera = bpy.props.BoolProperty(name=\n 'Place camera', description=\n 'Place a camera and light to make rendering possible')\n\n @classmethod\n def unregister(cls):\n print('Unregistered class: %s ' % cls.bl_label)\n\n\nclass Panel(bpy.types.Panel):\n bl_idname = 'CDTB_Panel'\n bl_label = 'CDTB_Panel'\n bl_space_type = 'VIEW_3D'\n bl_region_type = 'TOOLS'\n bl_context = 'objectmode'\n bl_category = 'CDTB'\n\n def draw(self, context):\n scn = context.scene\n layout = self.layout\n layout.label(text='Input file', icon_value=112)\n 
\"\"\"\n for i in range(100):\n layout.label(text = str(i),icon_value =i)\n \"\"\"\n box = layout.box()\n row = box.row()\n splitrow = row.split(factor=0.075)\n left_col = splitrow.column()\n right_col = splitrow.column()\n left_col.operator('error.scan_file', icon_value=108, text='')\n right_col.label(text=file_path.rsplit('\\\\', 2)[-1])\n layout.label(text='Settings', icon_value=117)\n box = layout.box()\n box.prop(scn, 'draw_bonds')\n box.prop(scn, 'bond_distance')\n box.prop(scn, 'draw_lattice')\n box.prop(scn, 'atom_name')\n box.prop(scn, 'print_data')\n box.prop(scn, 'style_selection_mode')\n box.prop(scn, 'quality_selection_mode')\n box.prop(scn, 'add_camera')\n layout.separator()\n splitrow = layout.split(factor=0.3)\n col = splitrow.column()\n col.operator('object.cdtb_operator', text='Draw Crystal')\n col = splitrow.column()\n col.label(text=user_feedback)\n layout.separator()\n\n @classmethod\n def register(cls):\n print('Registered class: %s ' % cls.bl_label)\n\n @classmethod\n def unregister(cls):\n print('Unregistered class: %s ' % cls.bl_label)\n\n\n<mask token>\n\n\ndef unregister():\n bpy.utils.unregister_class(Operator)\n bpy.utils.unregister_class(Panel)\n bpy.utils.unregister_class(ScanFileOperator)\n\n\nclass Crysdata:\n\n def __init__(self, F, cb):\n self.start = time.time()\n print('Draw timer started')\n self.name = F\n self.cell = Cell(cb)\n self.atoms = readEl(cb)\n self.pos = readPos(cb)\n c = self.cell\n self.ftoc = self.get_fractional_to_cartesian_matrix(c.alen, c.blen,\n c.clen, c.alpha, c.beta, c.gamma)\n\n def printout(self):\n print(self.name)\n print()\n self.cell.printout()\n print()\n for element in self.pos:\n element.printout()\n print()\n for element in self.atoms:\n element.printout()\n print()\n print('Fractional to cartesian matrix:')\n print(self.ftoc)\n\n def get_fractional_to_cartesian_matrix(self, a, b, c, alpha, beta, gamma):\n \"\"\"\n Original code found at: https://gist.github.com/Bismarrck/a68da01f19b39320f78a\n\n !changed formula to resemble one found on: https://en.wikipedia.org/wiki/Fractional_coordinates\n\n Return the transformation matrix that converts fractional coordinates to\n cartesian coordinates.\n Parameters\n ----------\n a, b, c : float\n The lengths of the edges.\n alpha, gamma, beta : float\n The angles between the sides.\n angle_in_degrees : bool\n True if alpha, beta and gamma are expressed in degrees.\n Returns\n -------\n r : array_like\n The 3x3 rotation matrix. 
``V_cart = np.dot(r, V_frac)``.\n \"\"\"\n alpha = np.deg2rad(alpha)\n beta = np.deg2rad(beta)\n gamma = np.deg2rad(gamma)\n cosa = np.cos(alpha)\n sina = np.sin(alpha)\n cosb = np.cos(beta)\n sinb = np.sin(beta)\n cosg = np.cos(gamma)\n sing = np.sin(gamma)\n volume = (1.0 - cosa ** 2.0 - cosb ** 2.0 - cosg ** 2.0 + 2.0 *\n cosa * cosb * cosg)\n volume = a * b * c * np.sqrt(volume)\n r = np.zeros((3, 3))\n r[0, 0] = float(a)\n r[0, 1] = float(b * cosg)\n r[0, 2] = float(c * cosb)\n r[1, 0] = float(0)\n r[1, 1] = float(b * sing)\n r[1, 2] = float(c * (cosa - cosb * cosg) / sing)\n r[2, 0] = float(0)\n r[2, 1] = float(0)\n r[2, 2] = float(volume / (a * b * sing))\n return r\n\n def drawCrystal(self):\n if draw_lattice:\n self.drawCell()\n print('Lattice drawn after {:.3f} seconds'.format(time.time() -\n self.start))\n self.drawAtoms()\n print('Atoms drawn after {:.3f} seconds'.format(time.time() - self.\n start))\n if draw_bonds:\n self.drawBonds()\n print('Bonds drawn after {:.3f} seconds'.format(time.time() -\n self.start))\n\n def drawAtoms(self):\n for a in self.atoms:\n a.drawObj(self.ftoc)\n print('Atoms drawn:', len(self.atoms))\n\n def drawCell(self):\n cell_corners = []\n cell_edges = []\n for i in range(2):\n for j in range(2):\n for k in range(2):\n bpy.ops.mesh.primitive_uv_sphere_add(size=lattice_size,\n location=toCarth(self.ftoc, [i, j, k]))\n activeObject = bpy.context.active_object\n cell_corners.append(activeObject)\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [0, 0, 0\n ]\n for i, j in zip([0, 0, 0, 1, 1, 2, 2, 3, 4, 4, 5, 6], [1, 2, 4, 3, \n 5, 3, 6, 7, 5, 6, 7, 7]):\n cell_edges.append(self.drawLine(cell_corners[i].location,\n cell_corners[j].location))\n for i in cell_corners:\n i.select_set(action='SELECT')\n for i in cell_edges:\n i.select_set(action='SELECT')\n bpy.context.view_layer.objects.active = cell_corners[0]\n bpy.ops.object.join()\n print('Cell box drawn')\n\n def drawLine(self, ac, tc):\n dx = tc[0] - ac[0]\n dy = tc[1] - ac[1]\n dz = tc[2] - ac[2]\n dist = np.sqrt(dx ** 2 + dy ** 2 + dz ** 2)\n bpy.ops.mesh.primitive_cylinder_add(vertices=qualitydic[\n draw_quality], radius=lattice_size, depth=dist, location=(dx / \n 2 + ac[0], dy / 2 + ac[1], dz / 2 + ac[2]))\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [0, 0, 0]\n phi = math.atan2(dy, dx)\n theta = math.acos(dz / dist)\n bpy.context.object.rotation_euler[1] = theta\n bpy.context.object.rotation_euler[2] = phi\n return activeObject\n\n def drawBonds(self):\n cnt = 0\n bpy.ops.curve.primitive_bezier_circle_add(location=(0, 0, 0),\n radius=bond_radius)\n bpy.context.object.name = 'bez'\n for atom in self.atoms:\n for target in self.atoms:\n if atom != target:\n if 'bond{}-{}'.format(target.elid, atom.elid\n ) in bpy.data.objects:\n continue\n if atom.sym == 'H' and target.sym == 'H':\n continue\n if calcDistance(self.ftoc, atom, target) <= bond_distance:\n self.makeBond(atom, target)\n cnt += 1\n print('Atom bonds drawn:', cnt)\n\n def makeBond(self, atom, target):\n if 'OBJECT' != bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n o1 = bpy.data.objects[atom.elid]\n o2 = bpy.data.objects[target.elid]\n bond = self.hookCurve(o1, o2, bpy.context.scene)\n bpy.context.object.data.bevel_object = bpy.data.objects['bez']\n bpy.context.object.name = 
'bond{}-{}'.format(atom.elid, target.elid)\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [255, 255, 255]\n if 'OBJECT' != bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n\n def hookCurve(self, o1, o2, scn):\n curve = bpy.data.curves.new('link', 'CURVE')\n curve.dimensions = '3D'\n spline = curve.splines.new('BEZIER')\n spline.bezier_points.add(1)\n p0 = spline.bezier_points[0]\n p1 = spline.bezier_points[1]\n p0.handle_right_type = 'VECTOR'\n p1.handle_left_type = 'VECTOR'\n obj = bpy.data.objects.new('link', curve)\n m0 = obj.modifiers.new('alpha', 'HOOK')\n m0.object = o1\n m1 = obj.modifiers.new('beta', 'HOOK')\n m1.object = o2\n bpy.context.collection.objects.link(obj)\n bpy.context.view_layer.objects.active = obj\n bpy.ops.object.mode_set(mode='EDIT')\n p0 = curve.splines[0].bezier_points[0]\n p1 = curve.splines[0].bezier_points[1]\n p0.select_control_point = True\n p1.select_control_point = False\n bpy.ops.object.hook_assign(modifier='alpha')\n p0 = curve.splines[0].bezier_points[0]\n p1 = curve.splines[0].bezier_points[1]\n p1.select_control_point = True\n p0.select_control_point = False\n bpy.ops.object.hook_assign(modifier='beta')\n return obj\n\n\nclass Cell:\n\n def __init__(self, cb):\n self.alen = float(cb['_cell_length_a'])\n self.blen = float(cb['_cell_length_b'])\n self.clen = float(cb['_cell_length_c'])\n self.alpha = float(cb['_cell_angle_alpha'])\n self.beta = float(cb['_cell_angle_beta'])\n self.gamma = float(cb['_cell_angle_gamma'])\n\n def printout(self):\n print(\n 'alen:{:8} \\nblen:{:8} \\nclen:{:8} \\nalpha:{:8} \\nbeta: {:8} \\ngamma:{:8}'\n .format(self.alen, self.blen, self.clen, self.alpha, self.beta,\n self.gamma))\n\n\nclass Atom:\n\n def __init__(self, elid, sym, xpos, ypos, zpos):\n self.elid = elid\n self.sym = sym\n self.xpos = float(xpos)\n self.ypos = float(ypos)\n self.zpos = float(zpos)\n\n def printout(self):\n print('id:{:3} symbol:{:2} x:{:.4f} y:{:.4f} z:{:.4f}'.format(self.\n elid, self.sym, self.xpos, self.ypos, self.zpos))\n\n def drawObj(self, ftoc):\n size = sizedic[self.sym] * styledic[draw_style][0\n ] + bond_radius * styledic[draw_style][1]\n bpy.ops.mesh.primitive_uv_sphere_add(segments=qualitydic[\n draw_quality], ring_count=qualitydic[draw_quality] / 2, size=\n size, location=toCarth(ftoc, [self.xpos, self.ypos, self.zpos]))\n bpy.context.object.name = self.elid\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n if atom_name:\n bpy.context.object.show_name = True\n if atom_color:\n bpy.context.object.active_material.diffuse_color = colordic[self\n .sym]\n else:\n bpy.context.object.active_material.diffuse_color = [1, 1, 1]\n\n\nclass sympos:\n\n def __init__(self, string):\n self.xsym = string[0].split(',')[0]\n self.ysym = string[0].split(',')[1]\n self.zsym = string[0].split(',')[2]\n\n def printout(self):\n print('x:{:8} y:{:8} z:{:8}'.format(self.xsym, self.ysym, self.zsym))\n\n\ndef readEl(cb):\n elements = []\n previd = []\n idcnt = []\n lb = cb.GetLoop('_atom_site_label')\n for el in lb:\n flag = False\n for i in range(len(previd)):\n if el[0] == previd[i]:\n flag = True\n break\n if flag:\n idcnt[i] += 1\n else:\n previd.append(el[0])\n idcnt.append(0)\n i = len(idcnt) - 1\n id_t = '{}.{}'.format(el[0], idcnt[i])\n elements.append(Atom(id_t, el[1], el[2], el[3], el[4]))\n return 
elements\n\n\ndef readPos(cb):\n positions = []\n lb = cb.GetLoop('_symmetry_equiv_pos_as_xyz')\n for el in lb:\n positions.append(sympos(el))\n return positions\n\n\n<mask token>\n\n\ndef addCamera(x, y, z):\n bpy.ops.object.camera_add(view_align=True, enter_editmode=False,\n location=(5 * x, 5 * y, 5 * z))\n print('camera added')\n bpy.ops.object.light_add(type='SUN', view_align=False, location=(0, 0, 0))\n obj_camera = bpy.data.objects['Camera']\n look_at(obj_camera, Vector([0, 0, z / 4]))\n obj_camera.data.type = 'ORTHO'\n obj_camera.data.ortho_scale = x + y + z\n\n\ndef clearWS():\n if 'OBJECT' != bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n bpy.ops.object.select_all(action='SELECT')\n bpy.ops.object.delete(use_global=False)\n for i in bpy.data.curves:\n bpy.data.curves.remove(i)\n for m in bpy.data.materials:\n bpy.data.materials.remove(m)\n for c in bpy.data.cameras:\n bpy.data.cameras.remove(c)\n print('Workspace cleared.')\n return\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass ScanFileOperator(bpy.types.Operator):\n bl_idname = 'error.scan_file'\n bl_label = 'Scan file for return'\n filepath = bpy.props.StringProperty(subtype='FILE_PATH')\n\n def execute(self, context):\n global file_path\n global user_feedback\n user_feedback = ''\n file_path = self.filepath\n return {'FINISHED'}\n\n def invoke(self, context, event):\n context.window_manager.fileselect_add(self)\n return {'RUNNING_MODAL'}\n\n def register():\n bpy.types.Scene.path_to_file = bpy.props.StringProperty(name='',\n description='Path to CIF file', default='empty')\n\n\nclass Operator(bpy.types.Operator):\n bl_idname = 'object.cdtb_operator'\n bl_label = 'CDTB_operator'\n bl_descriptor = 'Operator for drawing crystal'\n\n def execute(self, context):\n global pars_check\n global user_feedback\n if pars_check:\n user_feedback = 'CiFFile module not installed'\n return {'FINISHED'}\n if file_path == 'Select a file':\n print('No file selected')\n user_feedback = 'No File selected'\n else:\n user_feedback = 'Crystal drawn'\n global draw_bonds\n draw_bonds = context.scene.draw_bonds\n global bond_distance\n bond_distance = context.scene.bond_distance\n global draw_lattice\n draw_lattice = context.scene.draw_lattice\n global atom_name\n atom_name = context.scene.atom_name\n global print_data\n print_data = context.scene.print_data\n global draw_style\n global atom_color\n draw_style = context.scene.style_selection_mode\n if draw_style == 'STICK':\n draw_bonds = True\n atom_color = False\n else:\n atom_color = True\n global draw_quality\n draw_quality = context.scene.quality_selection_mode\n global add_camera\n add_camera = context.scene.add_camera\n drawCrystal(file_path)\n return {'FINISHED'}\n\n @classmethod\n def register(cls):\n print('Registered class: %s ' % cls.bl_label)\n bpy.types.Scene.draw_bonds = bpy.props.BoolProperty(name=\n 'Draw bonds', description='Draw bonds between elements')\n bpy.types.Scene.bond_distance = bpy.props.FloatProperty(name=\n 'Bond distance', description=\n 'Set max distance for bonds to occur', default=2, min=0.0, max=\n 10.0, precision=2)\n bpy.types.Scene.atom_name = bpy.props.BoolProperty(name=\n 'Atom names', description='Display the name of atoms')\n bpy.types.Scene.draw_lattice = bpy.props.BoolProperty(name=\n 'Draw lattice', description='Draw unit cell outline')\n bpy.types.Scene.print_data = bpy.props.BoolProperty(name=\n 'Print data', description='Print crystal data in terminal')\n selection_style = [('SPACE FILLING', 'SPACE FILLING', '', 1), (\n 'BALL AND STICK', 'BALL AND STICK', '', 2), ('STICK', 'STICK',\n '', 3)]\n bpy.types.Scene.style_selection_mode = bpy.props.EnumProperty(items\n =selection_style, name='Style')\n selection_qual = [('MIN', 'MIN', '', 1), ('LOW', 'LOW', '', 2), (\n 'MED', 'MED', '', 3), ('HIGH', 'HIGH', '', 4), ('MAX', 'MAX',\n '', 5)]\n bpy.types.Scene.quality_selection_mode = bpy.props.EnumProperty(items\n =selection_qual, name='Quality', default='MED')\n bpy.types.Scene.add_camera = bpy.props.BoolProperty(name=\n 'Place camera', description=\n 'Place a camera and light to make rendering possible')\n\n @classmethod\n def unregister(cls):\n print('Unregistered class: %s ' % cls.bl_label)\n\n\nclass Panel(bpy.types.Panel):\n bl_idname = 'CDTB_Panel'\n bl_label = 'CDTB_Panel'\n bl_space_type = 'VIEW_3D'\n bl_region_type = 'TOOLS'\n bl_context = 'objectmode'\n bl_category = 'CDTB'\n\n def draw(self, context):\n scn = context.scene\n layout = self.layout\n layout.label(text='Input file', icon_value=112)\n 
\"\"\"\n for i in range(100):\n layout.label(text = str(i),icon_value =i)\n \"\"\"\n box = layout.box()\n row = box.row()\n splitrow = row.split(factor=0.075)\n left_col = splitrow.column()\n right_col = splitrow.column()\n left_col.operator('error.scan_file', icon_value=108, text='')\n right_col.label(text=file_path.rsplit('\\\\', 2)[-1])\n layout.label(text='Settings', icon_value=117)\n box = layout.box()\n box.prop(scn, 'draw_bonds')\n box.prop(scn, 'bond_distance')\n box.prop(scn, 'draw_lattice')\n box.prop(scn, 'atom_name')\n box.prop(scn, 'print_data')\n box.prop(scn, 'style_selection_mode')\n box.prop(scn, 'quality_selection_mode')\n box.prop(scn, 'add_camera')\n layout.separator()\n splitrow = layout.split(factor=0.3)\n col = splitrow.column()\n col.operator('object.cdtb_operator', text='Draw Crystal')\n col = splitrow.column()\n col.label(text=user_feedback)\n layout.separator()\n\n @classmethod\n def register(cls):\n print('Registered class: %s ' % cls.bl_label)\n\n @classmethod\n def unregister(cls):\n print('Unregistered class: %s ' % cls.bl_label)\n\n\n<mask token>\n\n\ndef unregister():\n bpy.utils.unregister_class(Operator)\n bpy.utils.unregister_class(Panel)\n bpy.utils.unregister_class(ScanFileOperator)\n\n\nclass Crysdata:\n\n def __init__(self, F, cb):\n self.start = time.time()\n print('Draw timer started')\n self.name = F\n self.cell = Cell(cb)\n self.atoms = readEl(cb)\n self.pos = readPos(cb)\n c = self.cell\n self.ftoc = self.get_fractional_to_cartesian_matrix(c.alen, c.blen,\n c.clen, c.alpha, c.beta, c.gamma)\n\n def printout(self):\n print(self.name)\n print()\n self.cell.printout()\n print()\n for element in self.pos:\n element.printout()\n print()\n for element in self.atoms:\n element.printout()\n print()\n print('Fractional to cartesian matrix:')\n print(self.ftoc)\n\n def get_fractional_to_cartesian_matrix(self, a, b, c, alpha, beta, gamma):\n \"\"\"\n Original code found at: https://gist.github.com/Bismarrck/a68da01f19b39320f78a\n\n !changed formula to resemble one found on: https://en.wikipedia.org/wiki/Fractional_coordinates\n\n Return the transformation matrix that converts fractional coordinates to\n cartesian coordinates.\n Parameters\n ----------\n a, b, c : float\n The lengths of the edges.\n alpha, gamma, beta : float\n The angles between the sides.\n angle_in_degrees : bool\n True if alpha, beta and gamma are expressed in degrees.\n Returns\n -------\n r : array_like\n The 3x3 rotation matrix. 
``V_cart = np.dot(r, V_frac)``.\n \"\"\"\n alpha = np.deg2rad(alpha)\n beta = np.deg2rad(beta)\n gamma = np.deg2rad(gamma)\n cosa = np.cos(alpha)\n sina = np.sin(alpha)\n cosb = np.cos(beta)\n sinb = np.sin(beta)\n cosg = np.cos(gamma)\n sing = np.sin(gamma)\n volume = (1.0 - cosa ** 2.0 - cosb ** 2.0 - cosg ** 2.0 + 2.0 *\n cosa * cosb * cosg)\n volume = a * b * c * np.sqrt(volume)\n r = np.zeros((3, 3))\n r[0, 0] = float(a)\n r[0, 1] = float(b * cosg)\n r[0, 2] = float(c * cosb)\n r[1, 0] = float(0)\n r[1, 1] = float(b * sing)\n r[1, 2] = float(c * (cosa - cosb * cosg) / sing)\n r[2, 0] = float(0)\n r[2, 1] = float(0)\n r[2, 2] = float(volume / (a * b * sing))\n return r\n\n def drawCrystal(self):\n if draw_lattice:\n self.drawCell()\n print('Lattice drawn after {:.3f} seconds'.format(time.time() -\n self.start))\n self.drawAtoms()\n print('Atoms drawn after {:.3f} seconds'.format(time.time() - self.\n start))\n if draw_bonds:\n self.drawBonds()\n print('Bonds drawn after {:.3f} seconds'.format(time.time() -\n self.start))\n\n def drawAtoms(self):\n for a in self.atoms:\n a.drawObj(self.ftoc)\n print('Atoms drawn:', len(self.atoms))\n\n def drawCell(self):\n cell_corners = []\n cell_edges = []\n for i in range(2):\n for j in range(2):\n for k in range(2):\n bpy.ops.mesh.primitive_uv_sphere_add(size=lattice_size,\n location=toCarth(self.ftoc, [i, j, k]))\n activeObject = bpy.context.active_object\n cell_corners.append(activeObject)\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [0, 0, 0\n ]\n for i, j in zip([0, 0, 0, 1, 1, 2, 2, 3, 4, 4, 5, 6], [1, 2, 4, 3, \n 5, 3, 6, 7, 5, 6, 7, 7]):\n cell_edges.append(self.drawLine(cell_corners[i].location,\n cell_corners[j].location))\n for i in cell_corners:\n i.select_set(action='SELECT')\n for i in cell_edges:\n i.select_set(action='SELECT')\n bpy.context.view_layer.objects.active = cell_corners[0]\n bpy.ops.object.join()\n print('Cell box drawn')\n\n def drawLine(self, ac, tc):\n dx = tc[0] - ac[0]\n dy = tc[1] - ac[1]\n dz = tc[2] - ac[2]\n dist = np.sqrt(dx ** 2 + dy ** 2 + dz ** 2)\n bpy.ops.mesh.primitive_cylinder_add(vertices=qualitydic[\n draw_quality], radius=lattice_size, depth=dist, location=(dx / \n 2 + ac[0], dy / 2 + ac[1], dz / 2 + ac[2]))\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [0, 0, 0]\n phi = math.atan2(dy, dx)\n theta = math.acos(dz / dist)\n bpy.context.object.rotation_euler[1] = theta\n bpy.context.object.rotation_euler[2] = phi\n return activeObject\n\n def drawBonds(self):\n cnt = 0\n bpy.ops.curve.primitive_bezier_circle_add(location=(0, 0, 0),\n radius=bond_radius)\n bpy.context.object.name = 'bez'\n for atom in self.atoms:\n for target in self.atoms:\n if atom != target:\n if 'bond{}-{}'.format(target.elid, atom.elid\n ) in bpy.data.objects:\n continue\n if atom.sym == 'H' and target.sym == 'H':\n continue\n if calcDistance(self.ftoc, atom, target) <= bond_distance:\n self.makeBond(atom, target)\n cnt += 1\n print('Atom bonds drawn:', cnt)\n\n def makeBond(self, atom, target):\n if 'OBJECT' != bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n o1 = bpy.data.objects[atom.elid]\n o2 = bpy.data.objects[target.elid]\n bond = self.hookCurve(o1, o2, bpy.context.scene)\n bpy.context.object.data.bevel_object = bpy.data.objects['bez']\n bpy.context.object.name = 
'bond{}-{}'.format(atom.elid, target.elid)\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n bpy.context.object.active_material.diffuse_color = [255, 255, 255]\n if 'OBJECT' != bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n\n def hookCurve(self, o1, o2, scn):\n curve = bpy.data.curves.new('link', 'CURVE')\n curve.dimensions = '3D'\n spline = curve.splines.new('BEZIER')\n spline.bezier_points.add(1)\n p0 = spline.bezier_points[0]\n p1 = spline.bezier_points[1]\n p0.handle_right_type = 'VECTOR'\n p1.handle_left_type = 'VECTOR'\n obj = bpy.data.objects.new('link', curve)\n m0 = obj.modifiers.new('alpha', 'HOOK')\n m0.object = o1\n m1 = obj.modifiers.new('beta', 'HOOK')\n m1.object = o2\n bpy.context.collection.objects.link(obj)\n bpy.context.view_layer.objects.active = obj\n bpy.ops.object.mode_set(mode='EDIT')\n p0 = curve.splines[0].bezier_points[0]\n p1 = curve.splines[0].bezier_points[1]\n p0.select_control_point = True\n p1.select_control_point = False\n bpy.ops.object.hook_assign(modifier='alpha')\n p0 = curve.splines[0].bezier_points[0]\n p1 = curve.splines[0].bezier_points[1]\n p1.select_control_point = True\n p0.select_control_point = False\n bpy.ops.object.hook_assign(modifier='beta')\n return obj\n\n\nclass Cell:\n\n def __init__(self, cb):\n self.alen = float(cb['_cell_length_a'])\n self.blen = float(cb['_cell_length_b'])\n self.clen = float(cb['_cell_length_c'])\n self.alpha = float(cb['_cell_angle_alpha'])\n self.beta = float(cb['_cell_angle_beta'])\n self.gamma = float(cb['_cell_angle_gamma'])\n\n def printout(self):\n print(\n 'alen:{:8} \\nblen:{:8} \\nclen:{:8} \\nalpha:{:8} \\nbeta: {:8} \\ngamma:{:8}'\n .format(self.alen, self.blen, self.clen, self.alpha, self.beta,\n self.gamma))\n\n\nclass Atom:\n\n def __init__(self, elid, sym, xpos, ypos, zpos):\n self.elid = elid\n self.sym = sym\n self.xpos = float(xpos)\n self.ypos = float(ypos)\n self.zpos = float(zpos)\n\n def printout(self):\n print('id:{:3} symbol:{:2} x:{:.4f} y:{:.4f} z:{:.4f}'.format(self.\n elid, self.sym, self.xpos, self.ypos, self.zpos))\n\n def drawObj(self, ftoc):\n size = sizedic[self.sym] * styledic[draw_style][0\n ] + bond_radius * styledic[draw_style][1]\n bpy.ops.mesh.primitive_uv_sphere_add(segments=qualitydic[\n draw_quality], ring_count=qualitydic[draw_quality] / 2, size=\n size, location=toCarth(ftoc, [self.xpos, self.ypos, self.zpos]))\n bpy.context.object.name = self.elid\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name='MaterialName')\n activeObject.data.materials.append(mat)\n if atom_name:\n bpy.context.object.show_name = True\n if atom_color:\n bpy.context.object.active_material.diffuse_color = colordic[self\n .sym]\n else:\n bpy.context.object.active_material.diffuse_color = [1, 1, 1]\n\n\nclass sympos:\n\n def __init__(self, string):\n self.xsym = string[0].split(',')[0]\n self.ysym = string[0].split(',')[1]\n self.zsym = string[0].split(',')[2]\n\n def printout(self):\n print('x:{:8} y:{:8} z:{:8}'.format(self.xsym, self.ysym, self.zsym))\n\n\ndef readEl(cb):\n elements = []\n previd = []\n idcnt = []\n lb = cb.GetLoop('_atom_site_label')\n for el in lb:\n flag = False\n for i in range(len(previd)):\n if el[0] == previd[i]:\n flag = True\n break\n if flag:\n idcnt[i] += 1\n else:\n previd.append(el[0])\n idcnt.append(0)\n i = len(idcnt) - 1\n id_t = '{}.{}'.format(el[0], idcnt[i])\n elements.append(Atom(id_t, el[1], el[2], el[3], el[4]))\n return 
elements\n\n\ndef readPos(cb):\n positions = []\n lb = cb.GetLoop('_symmetry_equiv_pos_as_xyz')\n for el in lb:\n positions.append(sympos(el))\n return positions\n\n\n<mask token>\n\n\ndef look_at(obj_camera, point):\n loc_camera = obj_camera.matrix_world.to_translation()\n direction = point - loc_camera\n rot_quat = direction.to_track_quat('-Z', 'Y')\n obj_camera.rotation_euler = rot_quat.to_euler()\n\n\ndef addCamera(x, y, z):\n bpy.ops.object.camera_add(view_align=True, enter_editmode=False,\n location=(5 * x, 5 * y, 5 * z))\n print('camera added')\n bpy.ops.object.light_add(type='SUN', view_align=False, location=(0, 0, 0))\n obj_camera = bpy.data.objects['Camera']\n look_at(obj_camera, Vector([0, 0, z / 4]))\n obj_camera.data.type = 'ORTHO'\n obj_camera.data.ortho_scale = x + y + z\n\n\ndef clearWS():\n if 'OBJECT' != bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n bpy.ops.object.select_all(action='SELECT')\n bpy.ops.object.delete(use_global=False)\n for i in bpy.data.curves:\n bpy.data.curves.remove(i)\n for m in bpy.data.materials:\n bpy.data.materials.remove(m)\n for c in bpy.data.cameras:\n bpy.data.cameras.remove(c)\n print('Workspace cleared.')\n return\n\n\n<mask token>\n",
"step-5": "# -------------------------------------------\n# MODULES\n# -------------------------------------------\nimport sys\nimport platform\nif(platform.system()== \"Windows\"):\n\tdir_sep = \"\\\\\"\nelse:\n\tdir_sep = \"/\"\nimport time\nimport os\nimport numpy as np\nimport subprocess\nimport math\nfrom mathutils import Vector\ntry:\n from CifFile import CifFile\n pars_check = False\nexcept:\n print(\"PyCIFRW not installed, try: pip install PyCifRW\")\n pars_check = True\ntry:\n import bpy\n Blender_env = True\nexcept:\n print(\"Not in blender environment.\")\n\n# -------------------------------------------\n# VARIABLES\n# -------------------------------------------\n\n# global variables\nfile_path = \"Select a file\" # path to CIF-file\ndraw_bonds = False # draws bonds between atoms\ndraw_style = \"SPACE FILLING\" # sets draw style\ndraw_quality = \"MED\" # sets key for qualitydic\ndraw_lattice = False # draws unit cell outline\natom_name = False # displays names of atoms\nbond_distance = 2 # set the max distance between bound atoms\nlattice_size = 0.03 # sets size of lattice borders\nbond_radius = 0.05 # radius of bond\nadd_camera\t =\tTrue\t\t\t# render final image\natom_color\t\t=\tTrue\t\t\t# draw atoms in color\nuser_feedback = \"\" # feedback for the user\nprint_data = True\n\n\n# dictionaries\n# sets detail of spheres\nstyledic = {\n \"SPACE FILLING\" : [1,0],\n \"BALL AND STICK\" : [0.5,0],\n \"STICK\" : [0,1]\n }\n\n# sets detail of spheres\nqualitydic = {\n \"MIN\" : 8,\n \"LOW\" : 16,\n \"MED\" : 32,\n \"HIGH\" : 64,\n \"MAX\" : 128\n }\n\n'''\nUncomment this when no external dictionaries are found\n# dictionary which couples atoms to a color\ncolordic = {\n \"O\" : [1,0,0],\n \"Si\" : [0.25,0.25,1],\n \"Fe\" : [1,0.2,0.2],\n }\n\n# dictionary which couples atoms to a specific size\nsizedic = {\n \"O\" : 0.3,\n \"Si\" : 0.6,\n \"Fe\" : 1.4,\n }\n'''\n# Read in dictionaries from external files\n\n\n\npath = os.path.dirname(os.path.realpath(__file__))\n# dictionary which couples atoms to a color\n# Color scheme, in RGB percentages, following the CPK convention was extracted from https://en.wikipedia.org/wiki/CPK_coloring#Typical_assignments\n# data can be changed by modifying the values in colordic.txt\nwith open(path+dir_sep+'colordic.txt','r') as inf:\n colordic = eval(inf.read())\n\n# dictionary which couples atoms to a specific size\n# Atom data, in Ångström, was extracted from https://en.wikipedia.org/wiki/Atomic_radii_of_the_elements_(data_page)\n# data can be changed by modifying the values in sizedic.txt\nwith open(path+dir_sep+'sizedic.txt','r') as inf:\n sizedic = eval(inf.read())\n\n\n# ----------------------------------------------\n# BLENDER ADD-ON\n# ----------------------------------------------\n\n# add-on info\nbl_info = {\n \"name\": \"Crystallographic Drawing Tool for Blender\",\n \"description\": \"Add-on for drawing crystals from CIF-files.\",\n \"author\": \"Jarrit Boons\",\n \"blender\": (2, 80,0),\n \"location\": \"View3D\",\n \"category\": \"Crystallography in Blender\"\n}\n\n\n# Operator to open the file browser and select a file\nclass ScanFileOperator(bpy.types.Operator):\n\n bl_idname = \"error.scan_file\"\n bl_label = \"Scan file for return\"\n filepath = bpy.props.StringProperty(subtype=\"FILE_PATH\")\n\n def execute(self, context):\n\n global file_path\n global user_feedback\n user_feedback = \"\"\n file_path = self.filepath\n return {'FINISHED'}\n\n\n def invoke(self, context, event):\n\n context.window_manager.fileselect_add(self)\n return 
{'RUNNING_MODAL'}\n\n\n def register():\n\n bpy.types.Scene.path_to_file = bpy.props.StringProperty(\n name=\"\",\n description=\"Path to CIF file\",\n default = \"empty\"\n )\n\n# Operator to hold CDTB-data and program execution\nclass Operator(bpy.types.Operator):\n\n bl_idname = \"object.cdtb_operator\"\n bl_label = \"CDTB_operator\"\n bl_descriptor = \"Operator for drawing crystal\"\n\n # Runs the whole program\n def execute(self, context):\n global pars_check\n global user_feedback\n\n if(pars_check):\n user_feedback = \"CiFFile module not installed\"\n return {'FINISHED'}\n\n if(file_path == \"Select a file\"):\n print(\"No file selected\")\n user_feedback = \"No File selected\"\n else:\n user_feedback = \"Crystal drawn\"\n\n global draw_bonds\n draw_bonds = context.scene.draw_bonds\n\n global bond_distance\n bond_distance = context.scene.bond_distance\n\n global draw_lattice\n draw_lattice = context.scene.draw_lattice\n\n global atom_name\n atom_name = context.scene.atom_name\n\n global print_data\n print_data = context.scene.print_data\n\n global draw_style\n global atom_color\n draw_style = context.scene.style_selection_mode\n if(draw_style==\"STICK\"):\n draw_bonds = True\n atom_color = False\n else:\n atom_color = True\n\n global draw_quality\n draw_quality = context.scene.quality_selection_mode\n global add_camera\n add_camera = context.scene.add_camera\n drawCrystal(file_path)\n\n return {'FINISHED'}\n\n\n @classmethod\n def register(cls):\n\n print(\"Registered class: %s \" % cls.bl_label)\n bpy.types.Scene.draw_bonds = bpy.props.BoolProperty(\n name=\"Draw bonds\",\n description=\"Draw bonds between elements\"\n )\n\n bpy.types.Scene.bond_distance = bpy.props.FloatProperty(\n name=\"Bond distance\",\n description=\"Set max distance for bonds to occur\",\n default=2,\n min=0.0,\n max=10.0,\n precision=2\n )\n\n bpy.types.Scene.atom_name = bpy.props.BoolProperty(\n name=\"Atom names\",\n description=\"Display the name of atoms\"\n )\n\n bpy.types.Scene.draw_lattice = bpy.props.BoolProperty(\n name=\"Draw lattice\",\n description=\"Draw unit cell outline\"\n )\n\n bpy.types.Scene.print_data = bpy.props.BoolProperty(\n name=\"Print data\",\n description=\"Print crystal data in terminal\"\n )\n\n # Dropdown menu for drawing style\n selection_style = [\n (\"SPACE FILLING\", \"SPACE FILLING\", \"\", 1),\n (\"BALL AND STICK\", \"BALL AND STICK\", \"\", 2),\n (\"STICK\", \"STICK\", \"\", 3),\n ]\n\n bpy.types.Scene.style_selection_mode = bpy.props.EnumProperty(\n items=selection_style,\n name=\"Style\"\n )\n\n # Dropdown menu for drawing quality\n selection_qual = [\n (\"MIN\", \"MIN\", \"\", 1),\n (\"LOW\", \"LOW\", \"\", 2),\n (\"MED\", \"MED\", \"\", 3),\n (\"HIGH\", \"HIGH\", \"\", 4),\n (\"MAX\", \"MAX\", \"\", 5)\n ]\n\n bpy.types.Scene.quality_selection_mode = bpy.props.EnumProperty(\n items=selection_qual,\n name=\"Quality\",\n default=\"MED\"\n )\n bpy.types.Scene.add_camera = bpy.props.BoolProperty(\n name=\"Place camera\",\n description=\"Place a camera and light to make rendering possible\"\n )\n\n\n @classmethod\n def unregister(cls):\n\n print(\"Unregistered class: %s \" % cls.bl_label)\n\n# Panel to display add-on in Blender environment\nclass Panel(bpy.types.Panel):\n\n bl_idname = \"CDTB_Panel\"\n bl_label = \"CDTB_Panel\"\n bl_space_type = \"VIEW_3D\"\n bl_region_type = \"TOOLS\"\n bl_context = \"objectmode\"\n bl_category = \"CDTB\"\n\n def draw(self,context):\n\n scn = context.scene\n layout = self.layout\n layout.label(text = 'Input file',icon_value=112)\n\n 
'''\n for i in range(100):\n layout.label(text = str(i),icon_value =i)\n '''\n\n box = layout.box()\n row = box.row()\n splitrow = row.split(factor=0.075)\n left_col = splitrow.column()\n right_col = splitrow.column()\n left_col.operator('error.scan_file',icon_value=108,text=\"\")\n right_col.label(text=file_path.rsplit('\\\\', 2)[-1])\n layout.label(text = 'Settings',icon_value =117)\n box = layout.box()\n box.prop(scn,'draw_bonds')\n box.prop(scn,'bond_distance')\n box.prop(scn,'draw_lattice')\n box.prop(scn, 'atom_name')\n box.prop(scn,'print_data')\n box.prop(scn, 'style_selection_mode')\n box.prop(scn, 'quality_selection_mode')\n box.prop(scn, 'add_camera')\n layout.separator()\n splitrow = layout.split(factor=0.3)\n col = splitrow.column()\n col.operator('object.cdtb_operator',text=\"Draw Crystal\")\n col = splitrow.column()\n col.label(text=user_feedback)\n layout.separator()\n\n\n @classmethod\n def register(cls):\n\n print(\"Registered class: %s \" % cls.bl_label)\n\n\n @classmethod\n def unregister(cls):\n\n print(\"Unregistered class: %s \" % cls.bl_label)\n\n\ndef register():\n\n bpy.utils.register_class(Operator)\n bpy.utils.register_class(ScanFileOperator)\n bpy.utils.register_class(Panel)\n\n\ndef unregister():\n\n bpy.utils.unregister_class(Operator)\n bpy.utils.unregister_class(Panel)\n bpy.utils.unregister_class(ScanFileOperator)\n\n\n#----------------------------------------------\n# MAIN PROGRAM\n#----------------------------------------------\n\n\nclass Crysdata():\n\n def __init__(self,F,cb):\n\n self.start = time.time()\n print(\"Draw timer started\")\n self.name = F\n self.cell = Cell(cb)\n self.atoms = readEl(cb)\n self.pos = readPos(cb)\n c = self.cell\n self.ftoc = self.get_fractional_to_cartesian_matrix(c.alen,c.blen,c.clen,c.alpha,c.beta,c.gamma)\n\n\n def printout(self):\n\n print(self.name)\n print()\n self.cell.printout()\n print()\n for element in self.pos:\n element.printout()\n print()\n for element in self.atoms:\n element.printout()\n print()\n print(\"Fractional to cartesian matrix:\")\n print(self.ftoc)\n\n\n def get_fractional_to_cartesian_matrix(self,a, b, c, alpha, beta, gamma):\n\n \"\"\"\n Original code found at: https://gist.github.com/Bismarrck/a68da01f19b39320f78a\n\n !changed formula to resemble one found on: https://en.wikipedia.org/wiki/Fractional_coordinates\n\n Return the transformation matrix that converts fractional coordinates to\n cartesian coordinates.\n Parameters\n ----------\n a, b, c : float\n The lengths of the edges.\n alpha, gamma, beta : float\n The angles between the sides.\n angle_in_degrees : bool\n True if alpha, beta and gamma are expressed in degrees.\n Returns\n -------\n r : array_like\n The 3x3 rotation matrix. 
``V_cart = np.dot(r, V_frac)``.\n \"\"\"\n\n alpha = np.deg2rad(alpha)\n beta = np.deg2rad(beta)\n gamma = np.deg2rad(gamma)\n cosa = np.cos(alpha)\n sina = np.sin(alpha)\n cosb = np.cos(beta)\n sinb = np.sin(beta)\n cosg = np.cos(gamma)\n sing = np.sin(gamma)\n volume = 1.0 - cosa**2.0 - cosb**2.0 - cosg**2.0 + 2.0 * cosa * cosb * cosg\n volume = a*b*c*np.sqrt(volume)\n r = np.zeros((3, 3))\n r[0, 0] = float(a)\n r[0, 1] = float(b * cosg)\n r[0, 2] = float(c * cosb)\n r[1, 0] = float(0)\n r[1, 1] = float(b * sing)\n r[1, 2] = float(c * (cosa - cosb * cosg) / sing)\n r[2, 0] = float(0)\n r[2, 1] = float(0)\n r[2, 2] = float(volume / (a*b*sing))\n return r\n\n\n def drawCrystal(self):\n\n if draw_lattice:\n self.drawCell()\n print(\"Lattice drawn after {:.3f} seconds\".format((time.time()-self.start)))\n self.drawAtoms()\n print(\"Atoms drawn after {:.3f} seconds\".format((time.time()-self.start)))\n if(draw_bonds):\n self.drawBonds()\n print(\"Bonds drawn after {:.3f} seconds\".format((time.time()-self.start)))\n\n\n def drawAtoms(self):\n\n for a in self.atoms:\n a.drawObj(self.ftoc)\n print(\"Atoms drawn:\",len(self.atoms))\n\n\n def drawCell(self):\n\n cell_corners=[]\n cell_edges=[]\n # calculate and draw corners\n for i in range(2):\n for j in range(2):\n for k in range(2):\n bpy.ops.mesh.primitive_uv_sphere_add(size=lattice_size,location=toCarth(self.ftoc,[i,j,k]))\n activeObject = bpy.context.active_object # Set active object to variable\n cell_corners.append(activeObject)\n mat = bpy.data.materials.new(name=\"MaterialName\") # set new material to variable\n activeObject.data.materials.append(mat) # add the material to the object\n bpy.context.object.active_material.diffuse_color = [0,0,0] # change color\n # draw lines\n for i,j in zip([0,0,0,1,1,2,2,3,4,4,5,6],[1,2,4,3,5,3,6,7,5,6,7,7]):\n cell_edges.append(self.drawLine(cell_corners[i].location,cell_corners[j].location))\n # select all line and corners\n for i in cell_corners:\n i.select_set(action=\"SELECT\")\n for i in cell_edges:\n i.select_set(action=\"SELECT\")\n # set corner in origin as active and join meshes as one object\n bpy.context.view_layer.objects.active = cell_corners[0]\n bpy.ops.object.join()\n\n print(\"Cell box drawn\")\n\n\n def drawLine(self,ac,tc):\n\n dx = tc[0] - ac[0]\n dy = tc[1] - ac[1]\n dz = tc[2] - ac[2]\n dist = np.sqrt(dx**2 + dy**2 + dz**2)\n bpy.ops.mesh.primitive_cylinder_add(vertices=qualitydic[draw_quality],radius=lattice_size,depth = dist,location = (dx/2 + ac[0], dy/2 + ac[1], dz/2 + ac[2]))\n activeObject = bpy.context.active_object\n mat = bpy.data.materials.new(name=\"MaterialName\") # set new material to variable\n activeObject.data.materials.append(mat) # add the material to the object\n bpy.context.object.active_material.diffuse_color = [0,0,0] # change color\n\n phi = math.atan2(dy, dx)\n theta = math.acos(dz/dist)\n\n bpy.context.object.rotation_euler[1] = theta\n bpy.context.object.rotation_euler[2] = phi\n return activeObject\n\n\n def drawBonds(self):\n\n cnt = 0\n bpy.ops.curve.primitive_bezier_circle_add(location=(0,0,0),radius = bond_radius)\n bpy.context.object.name = 'bez'\n for atom in self.atoms:\n for target in self.atoms:\n if atom != target:\n if(\"bond{}-{}\".format(target.elid,atom.elid)in bpy.data.objects):\n continue\n if(atom.sym == 'H' and target.sym == 'H'):\n continue\n if calcDistance(self.ftoc,atom,target) <= bond_distance:\n self.makeBond(atom,target)\n cnt += 1\n print(\"Atom bonds drawn:\",cnt)\n\n\n # This function hooks the bond to the atoms\n def 
makeBond(self,atom,target):\n\n if 'OBJECT'!=bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n o1 = bpy.data.objects[atom.elid]\n o2 = bpy.data.objects[target.elid]\n bond = self.hookCurve(o1,o2, bpy.context.scene)\n bpy.context.object.data.bevel_object = bpy.data.objects[\"bez\"]\n bpy.context.object.name = \"bond{}-{}\".format(atom.elid,target.elid)\n activeObject = bpy.context.active_object # Set active object to variable\n mat = bpy.data.materials.new(name=\"MaterialName\") # set new material to variable\n activeObject.data.materials.append(mat) # add the material to the object\n bpy.context.object.active_material.diffuse_color = [255,255,255] # change color\n if 'OBJECT'!=bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n\n\n def hookCurve(self,o1, o2, scn):\n\n curve = bpy.data.curves.new(\"link\", 'CURVE')\n curve.dimensions = '3D'\n spline = curve.splines.new('BEZIER')\n\n spline.bezier_points.add(1)\n p0 = spline.bezier_points[0]\n p1 = spline.bezier_points[1]\n # p0.co = o1.location\n p0.handle_right_type = 'VECTOR'\n # p1.co = o2.location\n p1.handle_left_type = 'VECTOR'\n\n\n obj = bpy.data.objects.new(\"link\", curve)\n m0 = obj.modifiers.new(\"alpha\", 'HOOK')\n m0.object = o1\n m1 = obj.modifiers.new(\"beta\", 'HOOK')\n m1.object = o2\n\n bpy.context.collection.objects.link(obj)\n bpy.context.view_layer.objects.active = obj\n\n bpy.ops.object.mode_set(mode='EDIT')\n\n # Reassign the points\n p0 = curve.splines[0].bezier_points[0]\n p1 = curve.splines[0].bezier_points[1]\n\n # Hook first control point to first atom\n p0.select_control_point = True\n p1.select_control_point = False\n bpy.ops.object.hook_assign(modifier=\"alpha\")\n\n # Hook second control point to first atom\n p0 = curve.splines[0].bezier_points[0]\n p1 = curve.splines[0].bezier_points[1]\n p1.select_control_point = True\n p0.select_control_point = False\n bpy.ops.object.hook_assign(modifier=\"beta\")\n\n return obj\n\n\nclass Cell():\n\n def __init__(self,cb):\n\n self.alen = float(cb[\"_cell_length_a\"])\n self.blen = float(cb[\"_cell_length_b\"])\n self.clen = float(cb[\"_cell_length_c\"])\n self.alpha = float(cb[\"_cell_angle_alpha\"])\n self.beta = float(cb[\"_cell_angle_beta\"])\n self.gamma = float(cb[\"_cell_angle_gamma\"])\n\n\n def printout(self):\n\n print(\"alen:{:8} \\nblen:{:8} \\nclen:{:8} \\nalpha:{:8} \\nbeta: {:8} \\ngamma:{:8}\".format(self.alen,self.blen,self.clen,self.alpha,self.beta,self.gamma))\n\n\n\n\nclass Atom():\n\n def __init__(self,elid,sym,xpos,ypos,zpos):\n\n self.elid = elid\n self.sym = sym\n self.xpos = float(xpos)\n self.ypos = float(ypos)\n self.zpos = float(zpos)\n\n\n def printout(self):\n\n print(\"id:{:3} symbol:{:2} x:{:.4f} y:{:.4f} z:{:.4f}\".format(self.elid,self.sym,self.xpos,self.ypos,self.zpos))\n\n\n def drawObj(self,ftoc):\n size = sizedic[self.sym]*styledic[draw_style][0]+bond_radius*styledic[draw_style][1]\n bpy.ops.mesh.primitive_uv_sphere_add(segments=qualitydic[draw_quality],ring_count=qualitydic[draw_quality]/2,size=size,location=toCarth(ftoc,[self.xpos,self.ypos,self.zpos]))\n bpy.context.object.name = self.elid\n activeObject = bpy.context.active_object # Set active object to variable\n mat = bpy.data.materials.new(name=\"MaterialName\") # set new material to variable\n activeObject.data.materials.append(mat) # add the material to the object\n if(atom_name):\n bpy.context.object.show_name = True\n if(atom_color):\n bpy.context.object.active_material.diffuse_color = colordic[self.sym] # change color to dictionary color\n else:\n 
bpy.context.object.active_material.diffuse_color = [1,1,1] # change color to white\n\n\nclass sympos():\n\n def __init__(self,string):\n\n self.xsym = (string[0].split(','))[0]\n self.ysym = (string[0].split(','))[1]\n self.zsym = (string[0].split(','))[2]\n\n\n def printout(self):\n\n print(\"x:{:8} y:{:8} z:{:8}\".format(self.xsym,self.ysym,self.zsym))\n\n\n\ndef readEl(cb):\n\n elements = []\n previd = []\n idcnt = []\n lb = cb.GetLoop(\"_atom_site_label\")\n for el in lb:\n flag = False\n for i in range(len(previd)):\n if(el[0] == previd[i]):\n flag = True\n break\n if(flag):\n idcnt[i] += 1\n else:\n previd.append(el[0])\n idcnt.append(0)\n i = len(idcnt)-1\n id_t = \"{}.{}\".format(el[0],idcnt[i])\n elements.append(Atom(id_t,el[1],el[2],el[3],el[4]))\n return elements\n\n\ndef readPos(cb):\n\n positions = [];\n lb = cb.GetLoop(\"_symmetry_equiv_pos_as_xyz\")\n for el in lb:\n positions.append(sympos(el))\n return positions\n\n\ndef obabel_fill_unit_cell(cif_file, p1_file):\n\n # Convert symmetry to P1 using openbabel as subprocess\n # Notation: obabel [-i<input-type>] <infilename> [-o<output-type>] -O<outfilename> [Options]\n subprocess.run(['obabel', '-icif', cif_file, '-ocif', '-O', p1_file, '--fillUC', 'keepconnect'])\n\n\ndef calcDistance(ftoc,atom1,atom2):\n\n ac = toCarth(ftoc,[atom1.xpos,atom1.ypos,atom1.zpos])\n tc = toCarth(ftoc,[atom2.xpos,atom2.ypos,atom2.zpos])\n dx = tc[0] - ac[0]\n dy = tc[1] - ac[1]\n dz = tc[2] - ac[2]\n dist = np.sqrt(dx**2 + dy**2 + dz**2)\n return dist\n\n\ndef toCarth(ftoc,V_frac):\n\n return np.dot(ftoc, V_frac)\n\n\ndef look_at(obj_camera, point):\n\n loc_camera = obj_camera.matrix_world.to_translation()\n direction = point - loc_camera\n # point the cameras '-Z' and use its 'Y' as up\n rot_quat = direction.to_track_quat('-Z', 'Y')\n # assume we're using euler rotation\n obj_camera.rotation_euler = rot_quat.to_euler()\n\n\ndef addCamera(x,y,z):\n\n bpy.ops.object.camera_add(view_align=True, enter_editmode=False, location=(5*x,5*y,5*z))\n print(\"camera added\")\n bpy.ops.object.light_add(type='SUN', view_align=False, location=(0, 0, 0))\n obj_camera = bpy.data.objects[\"Camera\"]\n look_at(obj_camera, Vector([0,0,z/4]))\n obj_camera.data.type = 'ORTHO'\n obj_camera.data.ortho_scale = ((x+y+z))\n\n\ndef clearWS():\n\n if 'OBJECT'!=bpy.context.mode:\n bpy.ops.object.mode_set(mode='OBJECT')\n bpy.ops.object.select_all(action='SELECT')\n bpy.ops.object.delete(use_global=False)\n # remove all previous curves\n for i in bpy.data.curves:\n bpy.data.curves.remove(i)\n # remove all previous materials\n for m in bpy.data.materials:\n bpy.data.materials.remove(m)\n # remove all previous camera's\n for c in bpy.data.cameras:\n bpy.data.cameras.remove(c)\n\n print(\"Workspace cleared.\")\n return\n\n\ndef drawCrystal(file):\n # Check if file is file:\n S = time.time()\n global user_feedback\n ext = file[len(file)-4:]\n if(ext.lower() != \".cif\"):\n print(\"Only cif files can be visualised\")\n user_feedback = \"Not a cif file\"\n return\n # Check OpenBabel installation\n try:\n # Convert the cif file to its P1 symmetry notation as a temporary cif file\n print('Converting %s to P1' %file)\n obabel_fill_unit_cell(file, \"temp.CIF\")\n cf = CifFile(\"temp.CIF\")\n except:\n print(\"No OpenBabel installation found, install it from http://openbabel.org/wiki/Category:Installation\")\n user_feedback = \"OpenBabel not installed\"\n #cf = CifFile(file) CifFile apparently can't read in long filepaths\n return\n # Open and parse our cif\n f = file.rsplit(dir_sep, 
1)[-1]\n F = f[:3]\n print(f)\n cb = cf.first_block()\n Crystal = Crysdata(F,cb)\n\n # Print crystal data in terminal if checked\n if(print_data):\n Crystal.printout()\n\n print(\"Crystal data read after \"+ str(time.time() - S) + \" seconds\")\n\n # Draw crystal if in Blender environment\n if(Blender_env):\n clearWS()\n Crystal.drawCrystal()\n bpy.ops.object.select_all(action='DESELECT')\n if(add_camera):\n addCamera(Crystal.cell.alen,Crystal.cell.blen,Crystal.cell.clen)\n",
"step-ids": [
20,
32,
41,
42,
51
]
}
|
[
20,
32,
41,
42,
51
] |
"""
1. Если в строке больше символов в нижнем регистре - вывести все в нижнем,
если больше в верхнем - вывести все в верхнем,
если поровну - вывести в противоположных регистрах.
2. Если в строке каждое слово начинается с заглавной буквы, тогда
добавить в начало строки 'done. '.
Иначе заменить первые 5 элементов строки на 'draft: '.
(можно использовать метод replace и/или конкатенацию строк + срезы)
3. Если длина строки больше 20, то обрезать лишние символы до 20.
Иначе дополнить строку символами '@' до длины 20.
(можно использовать метод ljust либо конкатенацию и дублирование (+ и *))
После выполнения кажого пункта выводить результат типа:
1. Исходная строка: "some string".
Результат: "some edited string".
(Использовать форматирование строк f либо метод format)
"""
string = 'Lorem, Ipsum, Is, SImPlY, DuMMy, TEXT, Of, The, Printing, INDUSTRY.'
upp_regist = low_regist = title_regist = 0
for char in string:
if char.isupper():
upp_regist += 1
elif char.islower():
low_regist += 1
print('Some string:', string)
if upp_regist > low_regist:
print('Some edited string:', string.upper())
elif low_regist > upp_regist:
print('Some edited string:', string.lower())
else:
print('Some edited string:', string.swapcase())
print('Some string:', string)
if string.istitle():
print('Some edited string: done. ' + string)
else:
print('Some edited string:', string.replace('Lorem', 'draft: '))
print('Some string:', string)
if len(string) > 20:
string = string[:20]
print('Some edited string:', string)
else:
print('Some edited string:', string.ljust(20, '@'))
|
normal
|
{
"blob_id": "c7c405535b2ca656d4d5f18013e3e2fdef70efea",
"index": 8088,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor char in string:\n if char.isupper():\n upp_regist += 1\n elif char.islower():\n low_regist += 1\nprint('Some string:', string)\nif upp_regist > low_regist:\n print('Some edited string:', string.upper())\nelif low_regist > upp_regist:\n print('Some edited string:', string.lower())\nelse:\n print('Some edited string:', string.swapcase())\nprint('Some string:', string)\nif string.istitle():\n print('Some edited string: done. ' + string)\nelse:\n print('Some edited string:', string.replace('Lorem', 'draft: '))\nprint('Some string:', string)\nif len(string) > 20:\n string = string[:20]\n print('Some edited string:', string)\nelse:\n print('Some edited string:', string.ljust(20, '@'))\n",
"step-3": "<mask token>\nstring = 'Lorem, Ipsum, Is, SImPlY, DuMMy, TEXT, Of, The, Printing, INDUSTRY.'\nupp_regist = low_regist = title_regist = 0\nfor char in string:\n if char.isupper():\n upp_regist += 1\n elif char.islower():\n low_regist += 1\nprint('Some string:', string)\nif upp_regist > low_regist:\n print('Some edited string:', string.upper())\nelif low_regist > upp_regist:\n print('Some edited string:', string.lower())\nelse:\n print('Some edited string:', string.swapcase())\nprint('Some string:', string)\nif string.istitle():\n print('Some edited string: done. ' + string)\nelse:\n print('Some edited string:', string.replace('Lorem', 'draft: '))\nprint('Some string:', string)\nif len(string) > 20:\n string = string[:20]\n print('Some edited string:', string)\nelse:\n print('Some edited string:', string.ljust(20, '@'))\n",
"step-4": "\"\"\"\n 1. Если в строке больше символов в нижнем регистре - вывести все в нижнем,\n если больше в верхнем - вывести все в верхнем,\n если поровну - вывести в противоположных регистрах.\n 2. Если в строке каждое слово начинается с заглавной буквы, тогда\n добавить в начало строки 'done. '.\n Иначе заменить первые 5 элементов строки на 'draft: '.\n (можно использовать метод replace и/или конкатенацию строк + срезы)\n 3. Если длина строки больше 20, то обрезать лишние символы до 20.\n Иначе дополнить строку символами '@' до длины 20.\n (можно использовать метод ljust либо конкатенацию и дублирование (+ и *))\n После выполнения кажого пункта выводить результат типа:\n 1. Исходная строка: \"some string\".\n Результат: \"some edited string\".\n (Использовать форматирование строк f либо метод format)\n\"\"\"\n\nstring = 'Lorem, Ipsum, Is, SImPlY, DuMMy, TEXT, Of, The, Printing, INDUSTRY.'\n\nupp_regist = low_regist = title_regist = 0\n\nfor char in string:\n if char.isupper():\n upp_regist += 1\n elif char.islower():\n low_regist += 1\n\nprint('Some string:', string)\nif upp_regist > low_regist:\n print('Some edited string:', string.upper())\nelif low_regist > upp_regist:\n print('Some edited string:', string.lower())\nelse:\n print('Some edited string:', string.swapcase())\n\nprint('Some string:', string)\nif string.istitle():\n print('Some edited string: done. ' + string)\nelse:\n print('Some edited string:', string.replace('Lorem', 'draft: '))\n\nprint('Some string:', string)\nif len(string) > 20:\n string = string[:20]\n print('Some edited string:', string)\nelse:\n print('Some edited string:', string.ljust(20, '@'))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('valor de D: %.4f' % D)
print('valor de Rey: %.4f' % Rey)
print('valor de k: %.4f' % k)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
f = float(input('Digite o valor de f: '))
L = float(input('Digite o valor de L: '))
Q = float(input('Digite o valor de Q: '))
DeltaH = float(input('Digite o valor de DeltaH: '))
v = float(input('Digite o valor de v: '))
g = 9.81
e = 2e-06
D = (8 * f * L * Q * Q / (math.pi * math.pi * g * DeltaH)) ** 0.2
Rey = 4 * Q / (math.pi * D * v)
k = 0.25 / math.log10(e / (3.7 * D) + 5.74 / Rey ** 0.9) ** 2
print('valor de D: %.4f' % D)
print('valor de Rey: %.4f' % Rey)
print('valor de k: %.4f' % k)
<|reserved_special_token_1|>
import math
f = float(input('Digite o valor de f: '))
L = float(input('Digite o valor de L: '))
Q = float(input('Digite o valor de Q: '))
DeltaH = float(input('Digite o valor de DeltaH: '))
v = float(input('Digite o valor de v: '))
g = 9.81
e = 2e-06
D = (8 * f * L * Q * Q / (math.pi * math.pi * g * DeltaH)) ** 0.2
Rey = 4 * Q / (math.pi * D * v)
k = 0.25 / math.log10(e / (3.7 * D) + 5.74 / Rey ** 0.9) ** 2
print('valor de D: %.4f' % D)
print('valor de Rey: %.4f' % Rey)
print('valor de k: %.4f' % k)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
import math
#START YOUR CODE HERE
f = float(input('Digite o valor de f: '))
L = float(input('Digite o valor de L: '))
Q = float(input('Digite o valor de Q: '))
DeltaH = float(input('Digite o valor de DeltaH: '))
v = float(input('Digite o valor de v: '))
g = 9.81
e = 0.000002
#PROCESSING
D = ((8*f*L*Q*Q)/(math.pi*math.pi*g*DeltaH))**(0.2)
#Rey (REYNOLDS NUMBER) CALCULATION
Rey = (4*Q)/(math.pi*D*v)
#k (SWAMEE-JAIN FRICTION FACTOR) CALCULATION
k = ((0.25)/((math.log10(((e)/(3.7*D)) + ((5.74)/(Rey**0.9))))**2))
#OUTPUT
print('valor de D: %.4f' %D)
print('valor de Rey: %.4f' %Rey)
print('valor de k: %.4f' %k)
|
flexible
|
{
"blob_id": "b6183daa943cc63fd2959e3e54fc1e6af5d761de",
"index": 202,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('valor de D: %.4f' % D)\nprint('valor de Rey: %.4f' % Rey)\nprint('valor de k: %.4f' % k)\n",
"step-3": "<mask token>\nf = float(input('Digite o valor de f: '))\nL = float(input('Digite o valor de L: '))\nQ = float(input('Digite o valor de Q: '))\nDeltaH = float(input('Digite o valor de DeltaH: '))\nv = float(input('Digite o valor de v: '))\ng = 9.81\ne = 2e-06\nD = (8 * f * L * Q * Q / (math.pi * math.pi * g * DeltaH)) ** 0.2\nRey = 4 * Q / (math.pi * D * v)\nk = 0.25 / math.log10(e / (3.7 * D) + 5.74 / Rey ** 0.9) ** 2\nprint('valor de D: %.4f' % D)\nprint('valor de Rey: %.4f' % Rey)\nprint('valor de k: %.4f' % k)\n",
"step-4": "import math\nf = float(input('Digite o valor de f: '))\nL = float(input('Digite o valor de L: '))\nQ = float(input('Digite o valor de Q: '))\nDeltaH = float(input('Digite o valor de DeltaH: '))\nv = float(input('Digite o valor de v: '))\ng = 9.81\ne = 2e-06\nD = (8 * f * L * Q * Q / (math.pi * math.pi * g * DeltaH)) ** 0.2\nRey = 4 * Q / (math.pi * D * v)\nk = 0.25 / math.log10(e / (3.7 * D) + 5.74 / Rey ** 0.9) ** 2\nprint('valor de D: %.4f' % D)\nprint('valor de Rey: %.4f' % Rey)\nprint('valor de k: %.4f' % k)\n",
"step-5": "# -*- coding: utf-8 -*-\nimport math\n#COMECE SEU CÓDIGO AQUI\nf = float(input('Digite o valor de f: '))\nL = float(input('Digite o valor de L: '))\nQ = float(input('Digite o valor de Q: '))\nDeltaH = float(input('Digite o valor de DeltaH: '))\nv = float(input('Digite o valor de v: '))\ng = 9.81\ne = 0.000002\n#PROCESSAMENTO\nD = ((8*f*L*Q*Q)/(math.pi*math.pi*g*DeltaH))**(0.2)\n#CÁLCULO DO Rey\nRey = (4*Q)/(math.pi*D*v)\n#CÁLCULO DO k\nk = ((0.25)/((math.log10(((e)/(3.7*D)) + ((5.74)/(Rey**0.9))))**2))\n#SAÍDA\nprint('valor de D: %.4f' %D)\nprint('valor de Rey: %.4f' %Rey)\nprint('valor de k: %.4f' %k)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def sftp_connection():
while True:
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
try:
with pysftp.Connection('sb-emea.avl.com', username=
'[email protected]', password='AvlAvl2931!!',
cnopts=cnopts) as sftp:
print('connection has been established')
remotepath = '/Cummins_CTCI_NB/sftp_image_test/'
while True:
remotepath = '/Cummins_CTCI_NB/sftp_image_test/'
try:
if sftp.exists(remotepath):
print('hi')
time.sleep(5)
print('hello')
time.sleep(5)
except:
print('connection/ssherror exception')
break
except:
print('connection has been breaked')
time.sleep(5)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def sftp_connection():
while True:
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
try:
with pysftp.Connection('sb-emea.avl.com', username=
'[email protected]', password='AvlAvl2931!!',
cnopts=cnopts) as sftp:
print('connection has been established')
remotepath = '/Cummins_CTCI_NB/sftp_image_test/'
while True:
remotepath = '/Cummins_CTCI_NB/sftp_image_test/'
try:
if sftp.exists(remotepath):
print('hi')
time.sleep(5)
print('hello')
time.sleep(5)
except:
print('connection/ssherror exception')
break
except:
print('connection has been breaked')
time.sleep(5)
if __name__ == '__main__':
t1 = threading.Thread(target=sftp_connection)
t1.start()
<|reserved_special_token_1|>
import pysftp
import time
import threading
def sftp_connection():
while True:
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
try:
with pysftp.Connection('sb-emea.avl.com', username=
'[email protected]', password='AvlAvl2931!!',
cnopts=cnopts) as sftp:
print('connection has been established')
remotepath = '/Cummins_CTCI_NB/sftp_image_test/'
while True:
remotepath = '/Cummins_CTCI_NB/sftp_image_test/'
try:
if sftp.exists(remotepath):
print('hi')
time.sleep(5)
print('hello')
time.sleep(5)
except:
print('connection/ssherror exception')
break
except:
print('connection has been breaked')
time.sleep(5)
if __name__ == '__main__':
t1 = threading.Thread(target=sftp_connection)
t1.start()
<|reserved_special_token_1|>
import pysftp
import time
import threading
def sftp_connection():
while True:
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
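        # host-key verification is disabled; acceptable for a throwaway test host, unsafe in production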
try:
with pysftp.Connection('sb-emea.avl.com', username='[email protected]', password='AvlAvl2931!!',
cnopts=cnopts) as sftp:
print('connection has been established')
remotepath = '/Cummins_CTCI_NB/sftp_image_test/'
while True:
remotepath = '/Cummins_CTCI_NB/sftp_image_test/'
try:
if sftp.exists(remotepath):
print('hi')
time.sleep(5)
print('hello')
time.sleep(5)
except:
print('connection/ssherror exception')
break
except:
print('connection has been breaked')
time.sleep(5)
if __name__ == "__main__":
t1 = threading.Thread(target=sftp_connection)
    t1.start()
|
flexible
|
{
"blob_id": "676ccbac9385a4b63d599c3f85f16e28d839e9b8",
"index": 3731,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef sftp_connection():\n while True:\n cnopts = pysftp.CnOpts()\n cnopts.hostkeys = None\n try:\n with pysftp.Connection('sb-emea.avl.com', username=\n '[email protected]', password='AvlAvl2931!!',\n cnopts=cnopts) as sftp:\n print('connection has been established')\n remotepath = '/Cummins_CTCI_NB/sftp_image_test/'\n while True:\n remotepath = '/Cummins_CTCI_NB/sftp_image_test/'\n try:\n if sftp.exists(remotepath):\n print('hi')\n time.sleep(5)\n print('hello')\n time.sleep(5)\n except:\n print('connection/ssherror exception')\n break\n except:\n print('connection has been breaked')\n time.sleep(5)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef sftp_connection():\n while True:\n cnopts = pysftp.CnOpts()\n cnopts.hostkeys = None\n try:\n with pysftp.Connection('sb-emea.avl.com', username=\n '[email protected]', password='AvlAvl2931!!',\n cnopts=cnopts) as sftp:\n print('connection has been established')\n remotepath = '/Cummins_CTCI_NB/sftp_image_test/'\n while True:\n remotepath = '/Cummins_CTCI_NB/sftp_image_test/'\n try:\n if sftp.exists(remotepath):\n print('hi')\n time.sleep(5)\n print('hello')\n time.sleep(5)\n except:\n print('connection/ssherror exception')\n break\n except:\n print('connection has been breaked')\n time.sleep(5)\n\n\nif __name__ == '__main__':\n t1 = threading.Thread(target=sftp_connection)\nt1.start()\n",
"step-4": "import pysftp\nimport time\nimport threading\n\n\ndef sftp_connection():\n while True:\n cnopts = pysftp.CnOpts()\n cnopts.hostkeys = None\n try:\n with pysftp.Connection('sb-emea.avl.com', username=\n '[email protected]', password='AvlAvl2931!!',\n cnopts=cnopts) as sftp:\n print('connection has been established')\n remotepath = '/Cummins_CTCI_NB/sftp_image_test/'\n while True:\n remotepath = '/Cummins_CTCI_NB/sftp_image_test/'\n try:\n if sftp.exists(remotepath):\n print('hi')\n time.sleep(5)\n print('hello')\n time.sleep(5)\n except:\n print('connection/ssherror exception')\n break\n except:\n print('connection has been breaked')\n time.sleep(5)\n\n\nif __name__ == '__main__':\n t1 = threading.Thread(target=sftp_connection)\nt1.start()\n",
"step-5": "import pysftp\nimport time\nimport threading\n\ndef sftp_connection():\n while True:\n cnopts = pysftp.CnOpts()\n cnopts.hostkeys = None\n try:\n with pysftp.Connection('sb-emea.avl.com', username='[email protected]', password='AvlAvl2931!!',\n cnopts=cnopts) as sftp:\n print('connection has been established')\n remotepath = '/Cummins_CTCI_NB/sftp_image_test/'\n\n while True:\n remotepath = '/Cummins_CTCI_NB/sftp_image_test/'\n try:\n\n if sftp.exists(remotepath):\n\n\n print('hi')\n time.sleep(5)\n print('hello')\n time.sleep(5)\n except:\n print('connection/ssherror exception')\n break\n except:\n print('connection has been breaked')\n time.sleep(5)\n\nif __name__ == \"__main__\":\n t1 = threading.Thread(target=sftp_connection)\nt1.start()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def preprocessing_terms(termlist, lang_in, timeEx, patternBasedClean,
pluralClean, numbersClean, accentClean):
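    # Stopword-clean the incoming terms, then apply the optional passes:
    # temporal-expression removal (Añotador), POS-pattern filtering, Spanish
    # plural stripping, number removal and accent removal, ending with a
    # final stopword pass.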
date = '2020-06-03'
print('terms:', termlist)
print('lang:', lang_in)
processedTerms = clean_terms(termlist, lang_in)
print('This is processedTerms ')
print(processedTerms)
if timeEx == True:
processedTerms = '| '.join(processedTerms).replace('-', '').replace(','
, '').replace(';', '')
processedTerms = annotate_timex(processedTerms, date, lang_in)
processedTerms.sort()
if lang_in == 'es' and patternBasedClean == True:
stanza.download('es')
pos_tagger = stanza.Pipeline('es')
processedTerms = delete_pattern(processedTerms, pos_tagger)
if lang_in == 'en' and patternBasedClean == True:
stanza.download('en')
pos_tagger = stanza.Pipeline('en')
processedTerms = delete_pattern(processedTerms, pos_tagger)
if lang_in == 'es' and pluralClean == True:
processedTerms = quit_plural(processedTerms)
if numbersClean == True:
processedTerms = delete_numbers(processedTerms)
if accentClean == True:
processedTerms = acentos(processedTerms)
processedTerms = clean_terms(processedTerms, lang_in)
return processedTerms
def clean_terms(termlist, lang_in):
start_time = time()
if lang_in == 'es':
stop = stopwords.words('spanish')
file = open(sw_spanish, 'r', encoding='utf-8')
mystop = file.readlines()
elif lang_in == 'en':
stop = stopwords.words('english')
file = open(sw_english, 'r', encoding='utf-8')
mystop = file.readlines()
clean_list = []
cont = 0
for i in mystop:
stop.append(i.strip())
deletes = []
for i in termlist:
k = i.strip(',.:')
if k.lower() in stop or k in stop:
deletes.append(k)
elif k.lower() not in stop or k not in stop:
clean_list.append(k.replace(',', '').replace('-', ''))
print(deletes)
cont = len(termlist) - len(clean_list)
elapsed_time = time() - start_time
txt = 'CLEAN_TERMS, DELETE (' + str(cont) + ') NEW LIST SIZE: (' + str(len
(clean_list)) + ') TIME: (' + str(elapsed_time) + ')'
joind = ', '.join(deletes)
conts_log.information(txt, 'TERMS REMOVED: ' + joind)
print('CLEAN_TERMS, DELETE', cont, len(clean_list), elapsed_time)
return clean_list
def annotate_timex(text, date, lang):
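    # POST the '|'-joined term list to the Añotador service (TIMEX3 output)
    # and drop every term that comes back wrapped in annotation tags.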
    # dump the raw input for debugging; a context manager closes the handle
    with open('texto.txt', 'w') as f:
        f.write(text)
start_time = time()
url = 'https://annotador.oeg.fi.upm.es/annotate'
params = ('{"inputText":"' + text +
'","inputDate":"","domain":"legal","lan":"' + lang +
'","format":"timex3"}')
headers = {'Content-Type': 'application/json;charset=utf-8'}
response = requests.request('POST', url, headers=headers, data=params.
encode('utf8'))
textanotador = response.text
print('ENTRA ANOTADOR')
print(textanotador)
code = response.status_code
list_anotador = textanotador.split('|')
print(list_anotador)
deletes = []
cont = 0
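    # list.pop() during iteration skips the element that follows each removal,
    # so the same '<'-tag filter runs twice to catch entries missed on pass one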
for i in list_anotador:
if '<' in i and len(i) > 2:
cont = cont + 1
deletes.append(i)
ind = list_anotador.index(i)
list_anotador.pop(ind)
for i in list_anotador:
if '<' in i and len(i) > 2:
print(i)
cont = cont + 1
deletes.append(i)
ind = list_anotador.index(i)
list_anotador.pop(ind)
anotador = []
for i in list_anotador:
anotador.append(i.strip().replace(',', ''))
if code != 200:
print(
'WARNING: Annotador is down. Temporal expressions could not be removed.'
)
anotador = text.split('| ')
conts_log.error(
'Annotador is down. Temporal expressions could not be removed.',
code)
else:
elapsed_time = time() - start_time
txt = 'AÑOTADOR, DELETE (' + str(cont) + ') NEW LIST SIZE: (' + str(len
(anotador)) + ') TIME: (' + str(elapsed_time) + ')'
joind = ', '.join(deletes)
print('AÑOTADOR DELETE', cont, len(anotador), elapsed_time)
conts_log.information(txt, 'TERMS REMOVED: ' + joind)
return anotador
def infinitive(verb):
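    # Heuristic: verbs already ending in -ar/-er/-ir are returned unchanged;
    # otherwise Spanish future-tense endings (-rá, -án, -ré) are trimmed back
    # toward the stem.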
if verb[-2:] == 'ar' or verb[-2:] == 'er' or verb[-2:] == 'ir':
verb = verb
else:
if verb[-2:] == 'rá':
verb = verb[:-1]
if verb[-2:] == 'án':
verb = verb[:-2]
if verb[-2:] == 'ré':
verb = verb[:-1]
return verb
def delete_pattern(anotador, pos_tagger):
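    # POS-tag each candidate with stanza; single-word verbs and auxiliaries
    # are replaced by their lemma (via the `nlp` spaCy pipeline assumed to be
    # loaded elsewhere), and terms whose 1-3 word POS signature matches one of
    # the noise patterns below are queued for deletion.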
total = 0
deletes = []
start_time = time()
lemmas_list = []
cont = 0
cont_inf = 0
cont_post = 0
for i in anotador:
print('this is i')
print(i)
if len(i) > 1:
doc = pos_tagger(i)
sent = doc.sentences[0]
word = sent.words
tag = []
for token in word:
pos = token.upos
term = token.text
tupla = term, pos
tag.append(tupla)
print(token.text)
print(pos)
print('this is tag ')
print(tag)
total = total + 1
joini = i
list_pos = []
spl = joini.split(' ')
if joini != '':
join_tag = ''
for t in tag:
print('this is t')
print(t)
if t[1] == 'AUX':
doc = nlp(t[0])
lemlist = [tok.lemma_ for tok in doc]
lem = ''.join(lemlist)
lemmas_list.append(lem)
if lem == i:
lem = t[0]
list_pos.append('aux--' + str(lem))
if len(spl) == 1:
ind = anotador.index(str(i))
anotador[ind] = str(lem)
if t[1] == 'NOUN':
list_pos.append('noun-' + str(t[0]))
if t[1] == 'VERB':
cont_inf = cont_inf + 1
doc = nlp(t[0])
for tok in doc:
l = tok.lemma_
if l != t[0]:
cont_post = cont_post + 1
lemlist = [tok.lemma_ for tok in doc]
lem = ''.join(lemlist)
lemmas_list.append(lem)
if lem == i:
lem = t[0]
list_pos.append('verb-' + str(lem))
if len(spl) == 1:
ind = anotador.index(str(i))
anotador[ind] = str(lem)
if t[1] == 'ADV':
list_pos.append('adv--' + str(t[0]))
if t[1] == 'ADJ':
list_pos.append('adj--' + str(t[0]))
if t[1] == 'SCONJ':
list_pos.append('sconj' + str(t[0]))
spl_i = joini.split(' ')
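                # hand-enumerated 1-3 word POS signatures treated as noise;
                # a match records the term in `deletes` and bumps the counter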
if len(list_pos) == 1:
pos1 = list_pos[0]
if pos1[0:4] == 'adv-':
term = pos1[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
elif len(list_pos) == 2 and len(spl_i) == 2:
pos1 = list_pos[0]
pos2 = list_pos[1]
term = ''
if pos1[0:4] == 'aux-' and pos2[0:4] == 'verb':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'verb' and pos2[0:4] == 'aux-':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'verb' and pos2[0:4] == 'verb':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'verb':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'aux-':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'adv-' and pos2[0:4] == 'adj-':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'adj-' and pos2[0:4] == 'adv-':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'adv-' and pos2[0:4] == 'aux-':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'aux-' and pos2[0:4] == 'adv-':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'adv-' and pos2[0:4] == 'verb':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'verb' and pos2[0:4] == 'aux-':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'adv-':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'adv-' and pos2[0:4] == 'noun':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'verb' and pos2[0:4] == 'adv-':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'verb' and pos2[0:4] == 'noun':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'aux-' and pos2[0:4] == 'noun':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'adj-' and pos2[0:4] == 'noun':
term = pos1[5:] + ' ' + pos2[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
elif len(list_pos) == 3 and len(spl_i) == 3:
pos1 = list_pos[0]
pos2 = list_pos[1]
pos3 = list_pos[2]
term = ''
if pos1[0:4] == 'noun' and pos2[0:4] == 'verb' and pos3[0:4
] == 'verb':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'aux-' and pos3[0:4
] == 'verb':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'aux-' and pos3[0:4
] == 'aux-':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'verb' and pos3[0:4
] == 'aux-':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'verb' and pos3[0:4
] == 'noun':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'aux-' and pos3[0:4
] == 'noun':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'verb' and pos2[0:4] == 'noun' and pos3[0:4
] == 'noun':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'noun' and pos3[0:4
] == 'verb':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'aux-' and pos2[0:4] == 'noun' and pos3[0:4
] == 'noun':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'noun' and pos3[0:4
] == 'aux-':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'aux-' and pos2[0:4] == 'verb' and pos3[0:4
] == 'noun':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'verb' and pos3[0:4
] == 'adj-':
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'verb' and pos3[0:4
] == 'noun' and joini in anotador:
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'verb' and pos2[0:4] == 'noun' and pos3[0:4
] == 'adj-' and joini in anotador:
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'aux-' and pos3[0:4
] == 'adj-' and joini in anotador:
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'adv-' and pos3[0:4
] == 'adj-' and joini in anotador:
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'adj-' and pos2[0:4] == 'adv-' and pos3[0:4
] == 'adj-' and joini in anotador:
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'noun' and pos2[0:4] == 'adv-' and pos3[0:4
] == 'scon' and joini in anotador:
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'adj-' and pos2[0:4] == 'scon' and pos3[0:4
] == 'adv-' and joini in anotador:
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'aux-' and pos2[0:4] == 'noun' and pos3[0:4
] == 'adj-' and joini in anotador:
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'verb' and pos2[0:4] == 'verb' and pos3[0:4
] == 'verb' and joini in anotador:
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
if pos1[0:4] == 'adj-' and pos2[0:4] == 'noun' and pos3[0:4
] == 'adj-' and joini in anotador:
term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]
deletes.append(joini)
ind = anotador.index(joini)
cont = cont + 1
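    # Drop every term whose POS sequence matched one of the discard patterns above.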
for i in deletes:
if i in anotador:
ind = anotador.index(i)
anotador.pop(ind)
elapsed_time = time() - start_time
txt = 'PATRONES, DELETE' + ' (' + str(cont) + ') NEW LIST SIZE: (' + str(
len(anotador)) + ') TIME: (' + str(elapsed_time) + ')'
joind = ', '.join(deletes)
print('PATRONES DELETE', cont, len(anotador), elapsed_time)
conts_log.information(txt, 'TERMS REMOVED: ' + joind)
return anotador
import os
import json
import csv
import re
import requests
import spacy
import nltk
from nltk.parse import CoreNLPParser
from nltk.corpus import stopwords
from nltk.stem import PorterStemmer
stemmer = PorterStemmer()
from time import time
nlp = spacy.load('es_core_news_sm')
from modules_api import conts_log
sw_spanish="./data/stop-esp.txt"
sw_english="./data/stop-eng.txt"
inner_spanish="./data/inner-stop-esp.txt"
inner_english="./data/inner-stop-eng.txt"
import stanza
### METODO PARA EL SERVICIO
'''
como el main de debajo. este método va a ser el controlador.
Mediante parámetros va a decidir qué procesos va a seguir
termList: array/lista de terminos
lang: string con el idoma : es, en
timeEx: booleano que activa si se aplica timex o no
patternBasedClean: booleano que activa si se aplican patrones o no
pluralClean: booleano que activa si se aplica limpieza de plurales o no
numbersClean: booleano que activa si se aplica limpieza de numeros o no
accentClean: booleano que activa si se aplica limpieza de acentos o no
'''
def preprocessing_terms(termlist, lang_in, timeEx, patternBasedClean, pluralClean, numbersClean, accentClean):
    date = '2020-06-03'  # this should be computed automatically
    print('terms:', termlist)
    print('lang:', lang_in)
    # basic cleaning; this step should always run
    processedTerms = clean_terms(termlist, lang_in)
    print('This is processedTerms ')
    print(processedTerms)
    # Every step operates on the same variable, processedTerms,
    # whichever path is taken.
    # optional
    if timeEx == True:
        processedTerms = '| '.join(processedTerms).replace('-', '').replace(',', '').replace(';', '')
        processedTerms = annotate_timex(processedTerms, date, lang_in)
        processedTerms.sort()
    # optional
    if lang_in == 'es' and patternBasedClean == True:
        stanza.download('es')
        pos_tagger = stanza.Pipeline('es')
        processedTerms = delete_pattern(processedTerms, pos_tagger)
    if lang_in == 'en' and patternBasedClean == True:
        stanza.download('en')
        pos_tagger = stanza.Pipeline('en')
        processedTerms = delete_pattern(processedTerms, pos_tagger)
    # optional
    if lang_in == 'es' and pluralClean == True:
        processedTerms = quit_plural(processedTerms)
    # optional
    if numbersClean == True:
        processedTerms = delete_numbers(processedTerms)
    # optional
    if accentClean == True:
        processedTerms = acentos(processedTerms)
    # final clean
    processedTerms = clean_terms(processedTerms, lang_in)
    # return the terms
    return processedTerms
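
# A minimal usage sketch (hypothetical term list; assumes the stopword files
# under ./data exist and the Añotador service is reachable):
#   terms = ['contrato de arrendamiento', 'tres meses', 'obligaciones']
#   result = preprocessing_terms(terms, 'es', timeEx=True,
#                                patternBasedClean=True, pluralClean=True,
#                                numbersClean=True, accentClean=True)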
# 0 clean punctuation and stopwords
def clean_terms(termlist, lang_in):
    start_time = time()
    if lang_in == "es":
        stop = stopwords.words('spanish')
        file = open(sw_spanish, 'r', encoding='utf-8')
        mystop = file.readlines()
    elif lang_in == "en":
        stop = stopwords.words('english')
        file = open(sw_english, 'r', encoding='utf-8')
        mystop = file.readlines()
    clean_list = []
    for i in mystop:
        stop.append(i.strip())
    file.close()
    deletes = []
    for i in termlist:
        k = i.strip(',.:')
        if k.lower() in stop or k in stop:
            deletes.append(k)
        else:
            clean_list.append(k.replace(',', '').replace('-', ''))
    print(deletes)
    cont = len(termlist) - len(clean_list)
    elapsed_time = time() - start_time
    txt = 'CLEAN_TERMS, DELETE ('+str(cont)+') NEW LIST SIZE: ('+str(len(clean_list))+') TIME: ('+str(elapsed_time)+')'
    joind = ', '.join(deletes)
    conts_log.information(txt, 'TERMS REMOVED: '+joind)
    print('CLEAN_TERMS, DELETE', cont, len(clean_list), elapsed_time)
    return clean_list
# 1 Añotador (temporal expression annotation)
def annotate_timex(text, date, lang):
    with open('texto.txt', 'w') as f:  # debug dump of the request text
        f.write(text)
    start_time = time()
    url = 'https://annotador.oeg.fi.upm.es/annotate'
    # build the payload with json.dumps so quotes in the text cannot break the JSON
    params = json.dumps({"inputText": text, "inputDate": "", "domain": "legal",
                         "lan": lang, "format": "timex3"}, ensure_ascii=False)
    headers = {
        'Content-Type': 'application/json;charset=utf-8'
    }
    response = requests.request("POST", url, headers=headers, data=params.encode('utf8'))
    textanotador = response.text
    print('ENTRA ANOTADOR')
    print(textanotador)
    code = response.status_code
    list_anotador = textanotador.split('|')
    print(list_anotador)
    # drop the fragments that contain TIMEX3 markup; filtering into a new list
    # avoids the pop-while-iterating bug that the original double loop worked around
    deletes = [i for i in list_anotador if '<' in i and len(i) > 2]
    cont = len(deletes)
    anotador = [i.strip().replace(',', '') for i in list_anotador if i not in deletes]
    if code != 200:
        print('WARNING: Annotador is down. Temporal expressions could not be removed.')
        anotador = text.split('| ')
        conts_log.error('Annotador is down. Temporal expressions could not be removed.', code)
    else:
        elapsed_time = time() - start_time
        txt = 'AÑOTADOR, DELETE ('+str(cont)+') NEW LIST SIZE: ('+str(len(anotador))+') TIME: ('+str(elapsed_time)+')'
        joind = ', '.join(deletes)
        print('AÑOTADOR DELETE', cont, len(anotador), elapsed_time)
        conts_log.information(txt, 'TERMS REMOVED: '+joind)
    return anotador
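
# Sketch of the expected exchange (illustrative, not a recorded response):
# the service receives a pipe-separated string such as
# 'plazo| tres de junio| contrato' and, with format 'timex3', returns the
# temporal fragments wrapped in <TIMEX3>...</TIMEX3> markup, which the
# '<'-based filter above then discards.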
def infinitive(verb):
    # crude heuristic: strip future-tense endings back towards the infinitive
    if verb[-2:] == 'ar' or verb[-2:] == 'er' or verb[-2:] == 'ir':
        verb = verb
    else:
        if verb[-2:] == 'rá':
            verb = verb[:-1]
        if verb[-2:] == 'án':
            verb = verb[:-2]
        if verb[-2:] == 'ré':
            verb = verb[:-1]
    return verb
# 2.1 POS patterns (es)
def delete_pattern(anotador, pos_tagger):
    start_time = time()
    deletes = []
    lemmas_list = []
    cont = 0
    # POS-prefix sequences that mark a term for deletion. These are the same
    # combinations the original if-chains checked, with the duplicated
    # branches collapsed so each match is only counted once.
    two_word_patterns = {
        ('aux-', 'verb'), ('verb', 'aux-'), ('verb', 'verb'),
        ('noun', 'verb'), ('noun', 'aux-'), ('adv-', 'adj-'),
        ('adj-', 'adv-'), ('adv-', 'aux-'), ('aux-', 'adv-'),
        ('adv-', 'verb'), ('noun', 'adv-'), ('adv-', 'noun'),
        ('verb', 'adv-'), ('verb', 'noun'), ('aux-', 'noun'),
        ('adj-', 'noun'),
    }
    three_word_patterns = {
        ('noun', 'verb', 'verb'), ('noun', 'aux-', 'verb'),
        ('noun', 'aux-', 'aux-'), ('noun', 'verb', 'aux-'),
        ('noun', 'verb', 'noun'), ('noun', 'aux-', 'noun'),
        ('verb', 'noun', 'noun'), ('noun', 'noun', 'verb'),
        ('aux-', 'noun', 'noun'), ('noun', 'noun', 'aux-'),
        ('aux-', 'verb', 'noun'), ('noun', 'verb', 'adj-'),
        ('verb', 'noun', 'adj-'), ('noun', 'aux-', 'adj-'),
        ('noun', 'adv-', 'adj-'), ('adj-', 'adv-', 'adj-'),
        ('noun', 'adv-', 'scon'), ('adj-', 'scon', 'adv-'),
        ('aux-', 'noun', 'adj-'), ('verb', 'verb', 'verb'),
        ('adj-', 'noun', 'adj-'),
    }
    for i in anotador:
        if len(i) <= 1:
            continue
        # POS-tag the term with Stanza; if the Lynx CoreNLP server is down,
        # https://corenlp.run/ can be tried instead
        doc = pos_tagger(i)
        tag = [(word.text, word.upos) for word in doc.sentences[0].words]
        spl = i.split(' ')
        list_pos = []
        for term, pos in tag:
            if pos in ('AUX', 'VERB'):
                # lemmatize auxiliaries and verbs with spaCy
                lem = ''.join(tok.lemma_ for tok in nlp(term))
                lemmas_list.append(lem)
                if lem == i:
                    lem = term
                list_pos.append(('aux--' if pos == 'AUX' else 'verb-') + str(lem))
                if len(spl) == 1:
                    # single-word verbs are replaced by their lemma in place
                    anotador[anotador.index(str(i))] = str(lem)
            elif pos == 'NOUN':
                list_pos.append('noun-' + str(term))
            elif pos == 'ADV':
                list_pos.append('adv--' + str(term))
            elif pos == 'ADJ':
                list_pos.append('adj--' + str(term))
            elif pos == 'SCONJ':
                list_pos.append('sconj' + str(term))
        prefixes = tuple(p[0:4] for p in list_pos)
        if ((len(prefixes) == 1 and prefixes[0] == 'adv-')
                or (len(prefixes) == 2 and len(spl) == 2 and prefixes in two_word_patterns)
                or (len(prefixes) == 3 and len(spl) == 3 and prefixes in three_word_patterns)):
            deletes.append(i)
            cont = cont + 1
    for i in deletes:
        if i in anotador:
            anotador.pop(anotador.index(i))
    elapsed_time = time() - start_time
    txt = 'PATRONES, DELETE'+' ('+str(cont)+') NEW LIST SIZE: ('+str(len(anotador))+') TIME: ('+str(elapsed_time)+')'
    joind = ', '.join(deletes)
    print('PATRONES DELETE', cont, len(anotador), elapsed_time)
    conts_log.information(txt, 'TERMS REMOVED: '+joind)
    return anotador
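
# Illustrative behavior sketch (hypothetical terms): a two-word term tagged
# AUX + VERB such as 'debe presentar' matches ('aux-', 'verb') and is
# dropped, while a NOUN + ADJ pair like 'plazo establecido' matches no
# pattern and survives.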
# 3 plurals (Spanish heuristics)
def quit_plural(valuelist):
    start_time = time()
    with open('./data/numberlist_es', 'r', encoding='utf-8') as file:
        read = file.readlines()
    cont = 0
    for i in valuelist:
        ind = valuelist.index(i)
        term = i.replace(',', '').replace('-', ' ')
        valuelist[ind] = term
        plu = ''
        if 'es' in term[-2:] or 's' in term[-1:]:
            slp = term.split(' ')
            # terms containing a number word are left untouched
            for n in read:
                if n[:-1] in slp:
                    plu = i
            if not len(plu):
                for j in slp:
                    if ('es' in j[-2:] and 't' not in j[-3:-2] and 'l' not in j[-3:-2]) or ('les' in j[-3:]):
                        plu += ' ' + j[:-2]
                        # restore endings lost together with the '-es' suffix
                        if 'on' in plu[-2:]:
                            plu = ' ' + plu[:-2] + 'ón'
                        if 'v' in plu[-1:]:
                            plu = ' ' + plu + 'e'
                        if 'bl' in plu[-2:]:
                            plu = ' ' + plu + 'e'
                        if 'br' in plu[-2:]:
                            plu = ' ' + plu + 'e'
                    elif 's' in j[-1:]:
                        plu += ' ' + j[:-1]
                        pos = slp.index(j)
                        if pos > 0:
                            bef = slp[0]
                            if 'n' in bef[-1:] and 'ón' not in bef[-2:]:
                                splb = plu.split(' ')
                                firts = splb[1]
                                if 'n' not in firts[-1:]:
                                    pass
                                else:
                                    plu0 = firts[:-1]
                                    join1 = ' '.join(splb[2:])
                                    plu = plu0 + ' ' + join1
                    else:
                        plu += ' ' + j
                ind = valuelist.index(term)
                valuelist[ind] = plu.strip()
                cont = cont + 1
    quit_plu = []
    for i in set(valuelist):
        quit_plu.append(i)
    deletes = []
    new = []
    for i in valuelist:
        if i not in new:
            new.append(i)
        else:
            deletes.append(i)
    elapsed_time = time() - start_time
    txt = 'PLURAL, DELETE'+' ('+str(len(valuelist)-len(quit_plu))+') NEW LIST SIZE: ('+str(len(quit_plu))+') TIME: ('+str(elapsed_time)+')'
    joind = ', '.join(deletes)
    print('PLURALES DELETE', len(valuelist)-len(quit_plu), len(quit_plu), elapsed_time)
    conts_log.information(txt, 'TERMS REMOVED: '+joind)
    return quit_plu
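
# Rough sketch of the heuristic (hypothetical inputs, assuming neither word
# appears in ./data/numberlist_es):
#   quit_plural(['plazos', 'obligaciones'])  ->  ['plazo', 'obligación']
# (output order is not guaranteed, since duplicates are removed via a set)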
# 4 numbers
def delete_numbers(list_):
    start_time = time()
    with open('./data/numberlist_es', 'r', encoding='utf-8') as file:
        read = file.readlines()
    cont = 0
    deletes = []
    for i in read:
        if i[-1:] == '\n':
            i = i[:-1]
        # collect the matches first: popping while iterating over list_
        # would skip the element right after each removal
        matched = [j for j in list_ if ' ' + i + ' ' in ' ' + j + ' ']
        for j in matched:
            deletes.append(j)
            list_.pop(list_.index(j))
            cont = cont + 1
    elapsed_time = time() - start_time
    txt = 'NUMBERS, DELETE'+' ('+str(cont)+') NEW LIST SIZE: ('+str(len(list_))+') TIME: ('+str(elapsed_time)+')'
    joind = ', '.join(deletes)
    print('NUMEROS DELETE', cont, len(list_), elapsed_time)
    conts_log.information(txt, 'TERMS REMOVED: '+joind)
    return list_
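
# Quick sketch (hypothetical list; assumes 'tres' is an entry of
# ./data/numberlist_es):
#   delete_numbers(['plazo de tres meses', 'contrato'])  ->  ['contrato']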
# 5 read file
def readFile(read):
    start_time = time()
    text = ''
    for i in read:
        if i[-1:] == '\n':
            spl = i[:-1].split('\t')
        else:
            spl = i.split('\t')
        # the term sits in the second tab-separated column
        text += '| ' + spl[1]
    elapsed_time = time() - start_time
    return text
# strips accents
def quit_tilds(s):
    replacements = (
        ("á", "a"),
        ("é", "e"),
        ("í", "i"),
        ("ó", "o"),
        ("ú", "u"),
        # uppercase vowels too, so this stripper agrees with the
        # [áéíóúÁÉÍÓÚ] regex used in acentos()
        ("Á", "A"),
        ("É", "E"),
        ("Í", "I"),
        ("Ó", "O"),
        ("Ú", "U"),
    )
    for a, b in replacements:
        s = s.replace(a, b)
    return s
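
# e.g. quit_tilds('artículo')  ->  'articulo'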
def acentos(last):
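    # removes terms whose accent-stripped form duplicates another term,
    # keeping the accented variant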
start_time=time()
til=[]
list_acentos=[]
for i in last:
acento=re.search("[áéíóúÁÉÍÓÚ]+", i)
if(acento!=None):
sin=quit_tilds(i)
list_acentos.append(i)
til.append(sin)
else:
til.append(i)
til2 = []
delete=[]
for i in til:
if i not in til2:
til2.append(i)
else:
delete.append(i)
indices=[]
delete2=[]
for i in last:
if(i in delete and i not in indices):
indices.append(i)
delete2.append(i)
for i in delete2:
ind=last.index(i)
last.pop(ind)
last.sort()
elapsed_time=time()-start_time
return(last)
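
# Dedup sketch (hypothetical input): acentos(['acción', 'accion']) drops the
# unaccented duplicate and returns ['acción'].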
#-------MAIN-------#
def main(read, lang_in):
    start_time = time()
    text = readFile(read)
    date = '2020-06-03'
    lang = lang_in
    termlist = text.split('| ')
    print('RECIBE', termlist)
    clean_text = clean_terms(termlist, lang_in)
    join_clean_text = '| '.join(clean_text).replace('-', '').replace(',', '').replace(';', '')
    anotador = annotate_timex(join_clean_text, date, lang)
    anotador.sort()
    if lang_in == 'es':
        # delete_pattern needs a tagger; build one here (the original call
        # passed no tagger, which would raise a TypeError)
        pos_tagger = stanza.Pipeline('es')
        pattern = delete_pattern(anotador, pos_tagger)
        plural = quit_plural(pattern)
        numbers = delete_numbers(plural)
        tildes = acentos(numbers)
        stop2 = clean_terms(tildes, lang_in)
        print('FINALES', stop2)
    '''new=open('../data/clean_terms_freq4.txt', 'w')  # dump what remains
    for i in stop2:
        new.write(i+'\n')
    new.close()
    elapsed_time=time()-start_time
    print('Main', elapsed_time)
    return(stop2)'''
# Standalone run; the sample corpus path comes from the original comments,
# and 'es' is assumed from the file name
if __name__ == '__main__':
    with open('../data/estatuto_es.txt', 'r', encoding='utf-8') as file:
        main(file.readlines(), 'es')
slp = term.split(' ')\n for n in read:\n if n[:-1] in slp:\n plu = i\n if not len(plu):\n for j in slp:\n if 'es' in j[-2:] and 't' not in j[-3:-2] and 'l' not in j[\n -3:-2] or 'les' in j[-3:]:\n plu += ' ' + j[:-2]\n if 'on' in plu[-2:]:\n plu = ' ' + plu[:-2] + 'ón'\n if 'v' in plu[-1:]:\n plu = ' ' + plu + 'e'\n if 'bl' in plu[-2:]:\n plu = ' ' + plu + 'e'\n if 'br' in plu[-2:]:\n plu = ' ' + plu + 'e'\n elif 's' in j[-1:]:\n plu += ' ' + j[:-1]\n pos = slp.index(j)\n if pos > 0:\n bef = slp[0]\n if 'n' in bef[-1:] and 'ón' not in bef[-2:]:\n splb = plu.split(' ')\n firts = splb[1]\n if 'n' not in firts[-1:]:\n pass\n else:\n plu0 = firts[:-1]\n join1 = ' '.join(splb[2:])\n plu = plu0 + ' ' + join1\n else:\n plu += ' ' + j\n ind = valuelist.index(term)\n valuelist[ind] = plu.strip()\n cont = cont + 1\n quit_plu = []\n nuevalista = set(valuelist)\n for i in nuevalista:\n quit_plu.append(i)\n deletes = []\n new = []\n for i in valuelist:\n if i not in new:\n new.append(i)\n else:\n deletes.append(i)\n elapsed_time = time() - start_time\n txt = 'PLURAL, DELETE' + ' (' + str(len(valuelist) - len(quit_plu)\n ) + ') NEW LIST SIZE: (' + str(len(quit_plu)) + ') TIME: (' + str(\n elapsed_time) + ')'\n joind = ', '.join(deletes)\n print('PLURALES DELETE', len(valuelist) - len(quit_plu), len(quit_plu),\n elapsed_time)\n conts_log.information(txt, 'TERMS REMOVED: ' + joind)\n return quit_plu\n\n\n<mask token>\n\n\ndef readFile(read):\n start_time = time()\n text = ''\n for i in read:\n if i[-1:] == '\\n':\n spl = i[:-1].split('\\t')\n else:\n spl = i.split('\\t')\n term = spl[1].replace('-', '').replace(',', '').replace(';', '')\n spl2 = term.split(' ')\n text += '| ' + spl[1]\n elapsed_time = time() - start_time\n return text\n\n\ndef quit_tilds(s):\n replacements = ('á', 'a'), ('é', 'e'), ('í', 'i'), ('ó', 'o'), ('ú', 'u')\n for a, b in replacements:\n s = s.replace(a, b)\n return s\n\n\ndef acentos(last):\n start_time = time()\n til = []\n list_acentos = []\n for i in last:\n acento = re.search('[áéíóúÁÉÍÓÚ]+', i)\n if acento != None:\n sin = quit_tilds(i)\n list_acentos.append(i)\n til.append(sin)\n else:\n til.append(i)\n til2 = []\n delete = []\n for i in til:\n if i not in til2:\n til2.append(i)\n else:\n delete.append(i)\n indices = []\n delete2 = []\n for i in last:\n if i in delete and i not in indices:\n indices.append(i)\n delete2.append(i)\n for i in delete2:\n ind = last.index(i)\n last.pop(ind)\n last.sort()\n elapsed_time = time() - start_time\n return last\n\n\ndef main(read, lang_in):\n start_time = time()\n text = readFile(read)\n date = '2020-06-03'\n lang = lang_in\n termlist = text.split('| ')\n print('RECIBE', termlist)\n clean_text = clean_terms(termlist, lang_in)\n join_clean_text = '| '.join(clean_text).replace('-', '').replace(',', ''\n ).replace(';', '')\n anotador = annotate_timex(join_clean_text, date, lang)\n anotador.sort()\n if lang_in == 'es':\n pattern = delete_pattern(anotador)\n plural = quit_plural(pattern)\n numbers = delete_numbers(plural)\n tildes = acentos(numbers)\n stop2 = clean_terms(tildes, lang_in)\n print('FINALES', stop2)\n \"\"\"new=open('../data/clean_terms_freq4.txt', 'w')#se imprime lo que se queda\n\n\tfor i in stop2:\n\t new.write(i+'\n')\n\tnew.close()\n\telapsed_time=time()-start_time\n\tprint('Main', elapsed_time)\n\treturn(stop2)\"\"\"\n",
"step-4": "import os\nimport json\nimport csv\nimport re\nimport requests\nimport spacy\nimport nltk\nfrom nltk.parse import CoreNLPParser\nfrom nltk.corpus import stopwords\nfrom nltk.stem import PorterStemmer\nstemmer = PorterStemmer()\nfrom time import time\nnlp = spacy.load('es_core_news_sm')\nfrom modules_api import conts_log\nsw_spanish = './data/stop-esp.txt'\nsw_english = './data/stop-eng.txt'\ninner_spanish = './data/inner-stop-esp.txt'\ninner_english = './data/inner-stop-eng.txt'\nimport stanza\n<mask token>\n\n\ndef preprocessing_terms(termlist, lang_in, timeEx, patternBasedClean,\n pluralClean, numbersClean, accentClean):\n date = '2020-06-03'\n print('terms:', termlist)\n print('lang:', lang_in)\n processedTerms = clean_terms(termlist, lang_in)\n print('This is processedTerms ')\n print(processedTerms)\n if timeEx == True:\n processedTerms = '| '.join(processedTerms).replace('-', '').replace(','\n , '').replace(';', '')\n processedTerms = annotate_timex(processedTerms, date, lang_in)\n processedTerms.sort()\n if lang_in == 'es' and patternBasedClean == True:\n stanza.download('es')\n pos_tagger = stanza.Pipeline('es')\n processedTerms = delete_pattern(processedTerms, pos_tagger)\n if lang_in == 'en' and patternBasedClean == True:\n stanza.download('en')\n pos_tagger = stanza.Pipeline('en')\n processedTerms = delete_pattern(processedTerms, pos_tagger)\n if lang_in == 'es' and pluralClean == True:\n processedTerms = quit_plural(processedTerms)\n if numbersClean == True:\n processedTerms = delete_numbers(processedTerms)\n if accentClean == True:\n processedTerms = acentos(processedTerms)\n processedTerms = clean_terms(processedTerms, lang_in)\n return processedTerms\n\n\ndef clean_terms(termlist, lang_in):\n start_time = time()\n if lang_in == 'es':\n stop = stopwords.words('spanish')\n file = open(sw_spanish, 'r', encoding='utf-8')\n mystop = file.readlines()\n elif lang_in == 'en':\n stop = stopwords.words('english')\n file = open(sw_english, 'r', encoding='utf-8')\n mystop = file.readlines()\n clean_list = []\n cont = 0\n for i in mystop:\n stop.append(i.strip())\n deletes = []\n for i in termlist:\n k = i.strip(',.:')\n if k.lower() in stop or k in stop:\n deletes.append(k)\n elif k.lower() not in stop or k not in stop:\n clean_list.append(k.replace(',', '').replace('-', ''))\n print(deletes)\n cont = len(termlist) - len(clean_list)\n elapsed_time = time() - start_time\n txt = 'CLEAN_TERMS, DELETE (' + str(cont) + ') NEW LIST SIZE: (' + str(len\n (clean_list)) + ') TIME: (' + str(elapsed_time) + ')'\n joind = ', '.join(deletes)\n conts_log.information(txt, 'TERMS REMOVED: ' + joind)\n print('CLEAN_TERMS, DELETE', cont, len(clean_list), elapsed_time)\n return clean_list\n\n\ndef annotate_timex(text, date, lang):\n f = open('texto.txt', 'w')\n f.write(text)\n textanotador2 = ''\n start_time = time()\n url = 'https://annotador.oeg.fi.upm.es/annotate'\n params = ('{\"inputText\":\"' + text +\n '\",\"inputDate\":\"\",\"domain\":\"legal\",\"lan\":\"' + lang +\n '\",\"format\":\"timex3\"}')\n headers = {'Content-Type': 'application/json;charset=utf-8'}\n response = requests.request('POST', url, headers=headers, data=params.\n encode('utf8'))\n textanotador = response.text\n print('ENTRA ANOTADOR')\n print(textanotador)\n code = response.status_code\n list_anotador = textanotador.split('|')\n print(list_anotador)\n deletes = []\n cont = 0\n for i in list_anotador:\n if '<' in i and len(i) > 2:\n cont = cont + 1\n deletes.append(i)\n ind = list_anotador.index(i)\n 
list_anotador.pop(ind)\n for i in list_anotador:\n if '<' in i and len(i) > 2:\n print(i)\n cont = cont + 1\n deletes.append(i)\n ind = list_anotador.index(i)\n list_anotador.pop(ind)\n anotador = []\n for i in list_anotador:\n anotador.append(i.strip().replace(',', ''))\n if code != 200:\n print(\n 'WARNING: Annotador is down. Temporal expressions could not be removed.'\n )\n anotador = text.split('| ')\n conts_log.error(\n 'Annotador is down. Temporal expressions could not be removed.',\n code)\n else:\n elapsed_time = time() - start_time\n txt = 'AÑOTADOR, DELETE (' + str(cont) + ') NEW LIST SIZE: (' + str(len\n (anotador)) + ') TIME: (' + str(elapsed_time) + ')'\n joind = ', '.join(deletes)\n print('AÑOTADOR DELETE', cont, len(anotador), elapsed_time)\n conts_log.information(txt, 'TERMS REMOVED: ' + joind)\n return anotador\n\n\ndef infinitive(verb):\n if verb[-2:] == 'ar' or verb[-2:] == 'er' or verb[-2:] == 'ir':\n verb = verb\n else:\n if verb[-2:] == 'rá':\n verb = verb[:-1]\n if verb[-2:] == 'án':\n verb = verb[:-2]\n if verb[-2:] == 'ré':\n verb = verb[:-1]\n return verb\n\n\ndef delete_pattern(anotador, pos_tagger):\n total = 0\n deletes = []\n start_time = time()\n lemmas_list = []\n cont = 0\n cont_inf = 0\n cont_post = 0\n for i in anotador:\n print('this is i')\n print(i)\n if len(i) > 1:\n doc = pos_tagger(i)\n sent = doc.sentences[0]\n word = sent.words\n tag = []\n for token in word:\n pos = token.upos\n term = token.text\n tupla = term, pos\n tag.append(tupla)\n print(token.text)\n print(pos)\n print('this is tag ')\n print(tag)\n total = total + 1\n joini = i\n list_pos = []\n spl = joini.split(' ')\n if joini != '':\n join_tag = ''\n for t in tag:\n print('this is t')\n print(t)\n if t[1] == 'AUX':\n doc = nlp(t[0])\n lemlist = [tok.lemma_ for tok in doc]\n lem = ''.join(lemlist)\n lemmas_list.append(lem)\n if lem == i:\n lem = t[0]\n list_pos.append('aux--' + str(lem))\n if len(spl) == 1:\n ind = anotador.index(str(i))\n anotador[ind] = str(lem)\n if t[1] == 'NOUN':\n list_pos.append('noun-' + str(t[0]))\n if t[1] == 'VERB':\n cont_inf = cont_inf + 1\n doc = nlp(t[0])\n for tok in doc:\n l = tok.lemma_\n if l != t[0]:\n cont_post = cont_post + 1\n lemlist = [tok.lemma_ for tok in doc]\n lem = ''.join(lemlist)\n lemmas_list.append(lem)\n if lem == i:\n lem = t[0]\n list_pos.append('verb-' + str(lem))\n if len(spl) == 1:\n ind = anotador.index(str(i))\n anotador[ind] = str(lem)\n if t[1] == 'ADV':\n list_pos.append('adv--' + str(t[0]))\n if t[1] == 'ADJ':\n list_pos.append('adj--' + str(t[0]))\n if t[1] == 'SCONJ':\n list_pos.append('sconj' + str(t[0]))\n spl_i = joini.split(' ')\n if len(list_pos) == 1:\n pos1 = list_pos[0]\n if pos1[0:4] == 'adv-':\n term = pos1[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n elif len(list_pos) == 2 and len(spl_i) == 2:\n pos1 = list_pos[0]\n pos2 = list_pos[1]\n term = ''\n if pos1[0:4] == 'aux-' and pos2[0:4] == 'verb':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'verb' and pos2[0:4] == 'aux-':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'verb' and pos2[0:4] == 'verb':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'verb':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] 
== 'noun' and pos2[0:4] == 'aux-':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'adv-' and pos2[0:4] == 'adj-':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'adj-' and pos2[0:4] == 'adv-':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'adv-' and pos2[0:4] == 'aux-':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'aux-' and pos2[0:4] == 'adv-':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'adv-' and pos2[0:4] == 'verb':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'verb' and pos2[0:4] == 'aux-':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'adv-':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'adv-' and pos2[0:4] == 'noun':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'verb' and pos2[0:4] == 'adv-':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'verb' and pos2[0:4] == 'noun':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'aux-' and pos2[0:4] == 'noun':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'adj-' and pos2[0:4] == 'noun':\n term = pos1[5:] + ' ' + pos2[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n elif len(list_pos) == 3 and len(spl_i) == 3:\n pos1 = list_pos[0]\n pos2 = list_pos[1]\n pos3 = list_pos[2]\n term = ''\n if pos1[0:4] == 'noun' and pos2[0:4] == 'verb' and pos3[0:4\n ] == 'verb':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'aux-' and pos3[0:4\n ] == 'verb':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'aux-' and pos3[0:4\n ] == 'aux-':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'verb' and pos3[0:4\n ] == 'aux-':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'verb' and pos3[0:4\n ] == 'noun':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'aux-' and pos3[0:4\n ] == 'noun':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'verb' and pos2[0:4] == 'noun' and pos3[0:4\n ] == 'noun':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 
1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'noun' and pos3[0:4\n ] == 'verb':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'aux-' and pos2[0:4] == 'noun' and pos3[0:4\n ] == 'noun':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'noun' and pos3[0:4\n ] == 'aux-':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'aux-' and pos2[0:4] == 'verb' and pos3[0:4\n ] == 'noun':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'verb' and pos3[0:4\n ] == 'adj-':\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'verb' and pos3[0:4\n ] == 'noun' and joini in anotador:\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'verb' and pos2[0:4] == 'noun' and pos3[0:4\n ] == 'adj-' and joini in anotador:\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'aux-' and pos3[0:4\n ] == 'adj-' and joini in anotador:\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'adv-' and pos3[0:4\n ] == 'adj-' and joini in anotador:\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'adj-' and pos2[0:4] == 'adv-' and pos3[0:4\n ] == 'adj-' and joini in anotador:\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'noun' and pos2[0:4] == 'adv-' and pos3[0:4\n ] == 'scon' and joini in anotador:\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'adj-' and pos2[0:4] == 'scon' and pos3[0:4\n ] == 'adv-' and joini in anotador:\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'aux-' and pos2[0:4] == 'noun' and pos3[0:4\n ] == 'adj-' and joini in anotador:\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'verb' and pos2[0:4] == 'verb' and pos3[0:4\n ] == 'verb' and joini in anotador:\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n if pos1[0:4] == 'adj-' and pos2[0:4] == 'noun' and pos3[0:4\n ] == 'adj-' and joini in anotador:\n term = pos1[5:] + ' ' + pos2[5:] + ' ' + pos3[5:]\n deletes.append(joini)\n ind = anotador.index(joini)\n cont = cont + 1\n for i in deletes:\n if i in anotador:\n ind = anotador.index(i)\n anotador.pop(ind)\n elapsed_time = time() - start_time\n txt = 'PATRONES, DELETE' + ' (' + str(cont) + ') NEW LIST SIZE: (' + str(\n len(anotador)) + ') TIME: (' + str(elapsed_time) + ')'\n joind = ', '.join(deletes)\n print('PATRONES DELETE', cont, 
len(anotador), elapsed_time)\n conts_log.information(txt, 'TERMS REMOVED: ' + joind)\n return anotador\n\n\ndef quit_plural(valuelist):\n start_time = time()\n file = open('./data/numberlist_es', 'r', encoding='utf-8')\n read = file.readlines()\n plural = []\n cont = 0\n for i in valuelist:\n ind = valuelist.index(i)\n term = i.replace(',', '').replace('-', ' ')\n valuelist[ind] = term\n plu = ''\n if 'es' in term[-2:] or 's' in term[-1:]:\n slp = term.split(' ')\n for n in read:\n if n[:-1] in slp:\n plu = i\n if not len(plu):\n for j in slp:\n if 'es' in j[-2:] and 't' not in j[-3:-2] and 'l' not in j[\n -3:-2] or 'les' in j[-3:]:\n plu += ' ' + j[:-2]\n if 'on' in plu[-2:]:\n plu = ' ' + plu[:-2] + 'ón'\n if 'v' in plu[-1:]:\n plu = ' ' + plu + 'e'\n if 'bl' in plu[-2:]:\n plu = ' ' + plu + 'e'\n if 'br' in plu[-2:]:\n plu = ' ' + plu + 'e'\n elif 's' in j[-1:]:\n plu += ' ' + j[:-1]\n pos = slp.index(j)\n if pos > 0:\n bef = slp[0]\n if 'n' in bef[-1:] and 'ón' not in bef[-2:]:\n splb = plu.split(' ')\n firts = splb[1]\n if 'n' not in firts[-1:]:\n pass\n else:\n plu0 = firts[:-1]\n join1 = ' '.join(splb[2:])\n plu = plu0 + ' ' + join1\n else:\n plu += ' ' + j\n ind = valuelist.index(term)\n valuelist[ind] = plu.strip()\n cont = cont + 1\n quit_plu = []\n nuevalista = set(valuelist)\n for i in nuevalista:\n quit_plu.append(i)\n deletes = []\n new = []\n for i in valuelist:\n if i not in new:\n new.append(i)\n else:\n deletes.append(i)\n elapsed_time = time() - start_time\n txt = 'PLURAL, DELETE' + ' (' + str(len(valuelist) - len(quit_plu)\n ) + ') NEW LIST SIZE: (' + str(len(quit_plu)) + ') TIME: (' + str(\n elapsed_time) + ')'\n joind = ', '.join(deletes)\n print('PLURALES DELETE', len(valuelist) - len(quit_plu), len(quit_plu),\n elapsed_time)\n conts_log.information(txt, 'TERMS REMOVED: ' + joind)\n return quit_plu\n\n\ndef delete_numbers(list_):\n start_time = time()\n file = open('./data/numberlist_es', 'r', encoding='utf-8')\n read = file.readlines()\n cont = 0\n deletes = []\n for i in read:\n if i[-1:] == '\\n':\n i = i[:-1]\n for j in list_:\n if ' ' + i + ' ' in ' ' + j + ' ':\n deletes.append(j)\n ind = list_.index(j)\n cont = cont + 1\n list_.pop(ind)\n elapsed_time = time() - start_time\n txt = 'NUMBERS, DELETE' + ' (' + str(cont) + ') NEW LIST SIZE: (' + str(len\n (list_)) + ') TIME: (' + str(elapsed_time) + ')'\n joind = ', '.join(deletes)\n print('NUMEROS DELETE', cont, len(list_), elapsed_time)\n conts_log.information(txt, 'TERMS REMOVED: ' + joind)\n return list_\n\n\ndef readFile(read):\n start_time = time()\n text = ''\n for i in read:\n if i[-1:] == '\\n':\n spl = i[:-1].split('\\t')\n else:\n spl = i.split('\\t')\n term = spl[1].replace('-', '').replace(',', '').replace(';', '')\n spl2 = term.split(' ')\n text += '| ' + spl[1]\n elapsed_time = time() - start_time\n return text\n\n\ndef quit_tilds(s):\n replacements = ('á', 'a'), ('é', 'e'), ('í', 'i'), ('ó', 'o'), ('ú', 'u')\n for a, b in replacements:\n s = s.replace(a, b)\n return s\n\n\ndef acentos(last):\n start_time = time()\n til = []\n list_acentos = []\n for i in last:\n acento = re.search('[áéíóúÁÉÍÓÚ]+', i)\n if acento != None:\n sin = quit_tilds(i)\n list_acentos.append(i)\n til.append(sin)\n else:\n til.append(i)\n til2 = []\n delete = []\n for i in til:\n if i not in til2:\n til2.append(i)\n else:\n delete.append(i)\n indices = []\n delete2 = []\n for i in last:\n if i in delete and i not in indices:\n indices.append(i)\n delete2.append(i)\n for i in delete2:\n ind = last.index(i)\n last.pop(ind)\n 
last.sort()\n elapsed_time = time() - start_time\n return last\n\n\ndef main(read, lang_in):\n start_time = time()\n text = readFile(read)\n date = '2020-06-03'\n lang = lang_in\n termlist = text.split('| ')\n print('RECIBE', termlist)\n clean_text = clean_terms(termlist, lang_in)\n join_clean_text = '| '.join(clean_text).replace('-', '').replace(',', ''\n ).replace(';', '')\n anotador = annotate_timex(join_clean_text, date, lang)\n anotador.sort()\n if lang_in == 'es':\n pattern = delete_pattern(anotador)\n plural = quit_plural(pattern)\n numbers = delete_numbers(plural)\n tildes = acentos(numbers)\n stop2 = clean_terms(tildes, lang_in)\n print('FINALES', stop2)\n \"\"\"new=open('../data/clean_terms_freq4.txt', 'w')#se imprime lo que se queda\n\n\tfor i in stop2:\n\t new.write(i+'\n')\n\tnew.close()\n\telapsed_time=time()-start_time\n\tprint('Main', elapsed_time)\n\treturn(stop2)\"\"\"\n",
"step-5": "import os\nimport json\nimport csv\nimport re\nimport requests\nimport spacy\nimport nltk\nfrom nltk.parse import CoreNLPParser\nfrom nltk.corpus import stopwords\nfrom nltk.stem import PorterStemmer\nstemmer = PorterStemmer()\nfrom time import time\nnlp = spacy.load('es_core_news_sm')\nfrom modules_api import conts_log\nsw_spanish=\"./data/stop-esp.txt\"\nsw_english=\"./data/stop-eng.txt\"\ninner_spanish=\"./data/inner-stop-esp.txt\"\ninner_english=\"./data/inner-stop-eng.txt\"\nimport stanza\n\n\n### METODO PARA EL SERVICIO\n'''\ncomo el main de debajo. este método va a ser el controlador.\nMediante parámetros va a decidir qué procesos va a seguir\n\ntermList: array/lista de terminos\nlang: string con el idoma : es, en \n\n\ntimeEx: booleano que activa si se aplica timex o no\npatternBasedClean: booleano que activa si se aplican patrones o no\npluralClean: booleano que activa si se aplica limpieza de plurales o no\nnumbersClean: booleano que activa si se aplica limpieza de numeros o no\naccentClean: booleano que activa si se aplica limpieza de acentos o no\n\n\n'''\ndef preprocessing_terms(termlist, lang_in, timeEx, patternBasedClean, pluralClean, numbersClean, accentClean):\n \n date='2020-06-03' # esto debería ser automatico\n print('terms:', termlist)\n print('lang:', lang_in)\n \n # servicio básico, creo que se debería hacer siempre\n processedTerms=clean_terms(termlist, lang_in)\n \n \n \n print('This is processedTerms ')\n print(processedTerms)\n \n \n #print('this is timex' + timeEx)\n # Todo siempre sobre la misma variable: processedTerms. Da igual el camino que cojas. Usas la lista de terminos y se modifica.\n \n #opcional\n if(timeEx==True):\n processedTerms='| '.join(processedTerms).replace('-', '').replace(',', '').replace(';', '')\n processedTerms=annotate_timex(processedTerms, date, lang_in)\n processedTerms.sort()\n #opcional \n if((lang_in=='es') and (patternBasedClean==True)):\n stanza.download('es')\n pos_tagger=stanza.Pipeline('es')\n processedTerms=delete_pattern(processedTerms, pos_tagger)\n if((lang_in=='en') and (patternBasedClean==True)):\n stanza.download('en')\n pos_tagger=stanza.Pipeline('en')\n processedTerms=delete_pattern(processedTerms, pos_tagger)\n #opcional \n if((lang_in=='es') and (pluralClean==True)):\n processedTerms=quit_plural(processedTerms)\n #opcional\n if(numbersClean==True):\n processedTerms=delete_numbers(processedTerms)\n #opcional\n if(accentClean==True): \n processedTerms=acentos(processedTerms)\n #final clean \n processedTerms=clean_terms(processedTerms, lang_in)\n \n #devolvemos los terminos\n return processedTerms\n\n\n\n\n# 0 clean punctuation and stopwords\ndef clean_terms(termlist, lang_in):\n \n start_time=time()\n if(lang_in==\"es\"):\n \tstop=stopwords.words('spanish')\n \tfile=open(sw_spanish, 'r', encoding='utf-8')\n \tmystop=file.readlines()\n elif(lang_in==\"en\"):\n \tstop=stopwords.words('english')\n \tfile=open(sw_english, 'r', encoding='utf-8')\n \tmystop=file.readlines()\n\n \n clean_list = []\n cont=0\n for i in mystop:\n #print(i.strip())\n stop.append(i.strip())\n\n #print(stop)\n deletes=[]\n for i in termlist:\n k=i.strip(',.:')\n # print(k)\n if ((k.lower() in stop) or (k in stop)):\n \tdeletes.append(k)\n elif ((k.lower() not in stop) or (k not in stop)):\n clean_list.append(k.replace(',', '').replace('-', ''))\n\n print(deletes)\n cont=len(termlist)-len(clean_list)\n elapsed_time=time()-start_time\n\n txt='CLEAN_TERMS, DELETE ('+str(cont)+') NEW LIST SIZE: ('+str(len(clean_list))+') TIME: 
('+str(elapsed_time)+')'\n joind=', '.join(deletes)\n conts_log.information(txt, 'TERMS REMOVED: '+joind)\n print('CLEAN_TERMS, DELETE', cont, len(clean_list), elapsed_time )\n \n \n return(clean_list)\n\n\n# 1 añotador\ndef annotate_timex(text, date, lang):\n \n f=open('texto.txt', 'w')\n f.write(text)\n textanotador2=''\n start_time=time()\n\n url = 'https://annotador.oeg.fi.upm.es/annotate' \n params = \"{\\\"inputText\\\":\\\"\"+text+\"\\\",\\\"inputDate\\\":\\\"\\\",\\\"domain\\\":\\\"legal\\\",\\\"lan\\\":\\\"\"+lang+\"\\\",\\\"format\\\":\\\"timex3\\\"}\"\n headers = {\n\t\t \t\t'Content-Type': 'application/json;charset=utf-8'\n\t}\n #response=requests.post(url, data=params)\n response=requests.request(\"POST\", url, headers=headers, data = params.encode('utf8'))\n textanotador=response.text\n print('ENTRA ANOTADOR')\n print(textanotador)\n\n code=response.status_code\n list_anotador=textanotador.split('|')\n print(list_anotador)\n \n deletes=[]\n cont=0\n for i in list_anotador:\n if('<' in i and len(i)>2):\n cont=cont+1\n deletes.append(i)\n ind=list_anotador.index(i)\n list_anotador.pop(ind)\n for i in list_anotador:\n if('<' in i and len(i)>2):\n print(i)\n cont=cont+1\n deletes.append(i)\n ind=list_anotador.index(i)\n list_anotador.pop(ind)\n \n \n anotador=[]\n for i in list_anotador:\n anotador.append(i.strip().replace(',', ''))\n \n\n if(code!=200):\n\t print('WARNING: Annotador is down. Temporal expressions could not be removed.' )\n\t anotador=text.split('| ')\n\t conts_log.error('Annotador is down. Temporal expressions could not be removed.', code)\n else:\n\t elapsed_time=time()-start_time\n\t txt='AÑOTADOR, DELETE ('+str(cont)+') NEW LIST SIZE: ('+str(len(anotador))+') TIME: ('+str(elapsed_time)+')'\n\t joind=', '.join(deletes)\n\t print('AÑOTADOR DELETE', cont, len(anotador), elapsed_time )\n\t conts_log.information(txt, 'TERMS REMOVED: '+joind)\n \n return(anotador)\n\n\n\n\n\ndef infinitive(verb):\n\t\n\tif(verb[-2:]=='ar' or verb[-2:]=='er' or verb[-2:]=='ir'):\n\t\tverb=verb\n\telse:\n\t\tif(verb[-2:]=='rá' ):\n\t\t\t#print('---',verb,'-',verb[:-1])\n\t\t\tverb=verb[:-1]\n\t\tif(verb[-2:]=='án'):\n\t\t\t#print('---',verb,'-',verb[:-2])\n\t\t\tverb=verb[:-2]\n\t\tif(verb[-2:]=='ré'):\n\t\t\t#print('---',verb,'-',verb[:-1])\n\t\t\tverb=verb[:-1]\n\treturn (verb)\n\n\n# 2.1 patrones es\ndef delete_pattern(anotador, pos_tagger):\n\ttotal=0\n\tdeletes=[]\n\tstart_time=time()\n\tlemmas_list=[]\n\tcont=0\n\tcont_inf=0\n\tcont_post=0\n\tfor i in anotador:\n\t\tprint('this is i')\n\t\tprint(i) \n\t\tif(len(i)>1):\n\t\t\t#print( i, i.split(' ') )\n\t\t\t#pos_tagger = CoreNLPParser('https://corenlp.run/', tagtype='pos')\n #si se cae el de lynx, probar con este https://corenlp.run/\n\t\t\t#print(i)\n\t\t\tdoc=pos_tagger(i)\n\t\t\t#print(doc)\n\t\t\tsent=doc.sentences[0]\n\t\t\tword=sent.words\n\t\t\ttag=[]\n\t\t\tfor token in word:\n\t\t\t\tpos=token.upos\n\t\t\t\tterm=token.text\n\t\t\t\ttupla=(term, pos)\n\t\t\t\ttag.append(tupla)\n\t\t\t\tprint(token.text)\n\t\t\t\tprint(pos)\n\t\t\t#tag=pos_tagger.tag(i.split(' '))\n\t\t\tprint('this is tag ') \n\t\t\tprint(tag)\n\t\t\ttotal=total+1\n\t\t\tjoini=i\n\t\t\tlist_pos=[]\n\t\t\tspl=joini.split(' ')\n\t\t\tif(joini!=''):\n\t\t\t\tjoin_tag=''\n\t\t\t\tfor t in tag:\n\t\t\t\t\tprint('this is t') \n\t\t\t\t\tprint(t)\n\t\t\t\t\tif(t[1] == 'AUX' ):\n\t\t\t\t\t\tdoc=nlp(t[0])\n\t\t\t\t\t\tlemlist=[tok.lemma_ for tok in 
doc]\n\t\t\t\t\t\tlem=''.join(lemlist)\n\t\t\t\t\t\tlemmas_list.append(lem)\n\t\t\t\t\t\tif(lem==i):\n\t\t\t\t\t\t\tlem=t[0]\n\t\t\t\t\t\tlist_pos.append('aux--'+str(lem))\n\t\t\t\t\t\tif(len(spl)==1):\n\t\t\t\t\t\t\tind=anotador.index(str(i))\n\t\t\t\t\t\t\tanotador[ind]=str(lem)\n\t\t\t\t\tif(t[1] == 'NOUN'):\n\t\t\t\t\t\tlist_pos.append('noun-'+str(t[0]))\n\t\t\t\t\tif(t[1] == 'VERB'):\n\t\t\t\t\t\tcont_inf=cont_inf+1\n\t\t\t\t\t\tdoc=nlp(t[0])\n\t\t\t\t\t\tfor tok in doc:\n\t\t\t\t\t\t\tl=tok.lemma_\n\t\t\t\t\t\t\tif(l!=t[0]):\n\t\t\t\t\t\t\t\tcont_post=cont_post+1\n\t\t\t\t\t\tlemlist=[tok.lemma_ for tok in doc]\n\t\t\t\t\t\tlem=''.join(lemlist)\n\t\t\t\t\t\tlemmas_list.append(lem)\n\t\t\t\t\t\tif(lem==i):\n\t\t\t\t\t\t\tlem=t[0]\n\t\t\t\t\t\tlist_pos.append('verb-'+str(lem))\n\t\t\t\t\t\tif(len(spl)==1):\n\t\t\t\t\t\t\tind=anotador.index(str(i))\n\t\t\t\t\t\t\tanotador[ind]=str(lem)\n\t\t\t\t\tif(t[1] == 'ADV'):\n\t\t\t\t\t\tlist_pos.append('adv--'+str(t[0]))\n\t\t\t\t\tif(t[1] == 'ADJ'):\n\t\t\t\t\t\tlist_pos.append('adj--'+str(t[0]))\n\t\t\t\t\tif(t[1] == 'SCONJ'):\n\t\t\t\t\t\tlist_pos.append('sconj'+str(t[0]))\n\t\t\t\t\n\t\t\t\tspl_i=joini.split(' ')\n\t\t\t\t\n\t\t\t\tif(len(list_pos)==1):\n\t\t\t\t\tpos1=list_pos[0]\n\t\t\t\t\tif(pos1[0:4]=='adv-' ):\n\t\t\t\t\t\tterm=pos1[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\n\t\t\t\telif(len(list_pos)==2 and len(spl_i)==2):\n\t\t\t\t\tpos1=list_pos[0]\n\t\t\t\t\tpos2=list_pos[1]\n\t\t\t\t\tterm=''\n\t\t\t\t\tif(pos1[0:4]=='aux-' and pos2[0:4]=='verb'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='verb' and pos2[0:4]=='aux-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='verb' and pos2[0:4]=='verb'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='verb'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='aux-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='adv-' and pos2[0:4]=='adj-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='adj-' and pos2[0:4]=='adv-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='adv-' and pos2[0:4]=='aux-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='aux-' and pos2[0:4]=='adv-'):\n\t\t\t\t\t\tterm=pos1[5:]+' 
'+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='adv-' and pos2[0:4]=='verb'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='verb' and pos2[0:4]=='aux-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='adv-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='adv-' and pos2[0:4]=='noun'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='verb' and pos2[0:4]=='adv-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='verb' and pos2[0:4]=='noun'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='aux-' and pos2[0:4]=='noun'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='adj-' and pos2[0:4]=='noun'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\n\t\t\t\telif(len(list_pos)==3 and len(spl_i)==3):\n\t\t\t\t\t#print(list_pos, spl_i,'-', len(list_pos), len(spl_i))\n\t\t\t\t\tpos1=list_pos[0]\n\t\t\t\t\tpos2=list_pos[1]\n\t\t\t\t\tpos3=list_pos[2]\n\t\t\t\t\tterm=''\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='verb' and pos3[0:4]=='verb'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='aux-' and pos3[0:4]=='verb'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='aux-' and pos3[0:4]=='aux-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='verb' and pos3[0:4]=='aux-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\t\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='verb' and pos3[0:4]=='noun'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and 
pos2[0:4]=='aux-' and pos3[0:4]=='noun'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='verb' and pos2[0:4]=='noun' and pos3[0:4]=='noun'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='noun' and pos3[0:4]=='verb'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='aux-' and pos2[0:4]=='noun' and pos3[0:4]=='noun'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='noun' and pos3[0:4]=='aux-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='aux-' and pos2[0:4]=='verb' and pos3[0:4]=='noun'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='verb' and pos3[0:4]=='adj-'):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='verb' and pos3[0:4]=='noun' and joini in anotador):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='verb' and pos2[0:4]=='noun' and pos3[0:4]=='adj-' and joini in anotador):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='aux-' and pos3[0:4]=='adj-' and joini in anotador):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='adv-' and pos3[0:4]=='adj-' and joini in anotador):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='adj-' and pos2[0:4]=='adv-' and pos3[0:4]=='adj-' and joini in anotador):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='noun' and pos2[0:4]=='adv-' and pos3[0:4]=='scon' and joini in anotador):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='adj-' and 
pos2[0:4]=='scon' and pos3[0:4]=='adv-' and joini in anotador):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='aux-' and pos2[0:4]=='noun' and pos3[0:4]=='adj-' and joini in anotador):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='verb' and pos2[0:4]=='verb' and pos3[0:4]=='verb' and joini in anotador):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\t\t\t\t\tif(pos1[0:4]=='adj-' and pos2[0:4]=='noun' and pos3[0:4]=='adj-' and joini in anotador):\n\t\t\t\t\t\tterm=pos1[5:]+' '+pos2[5:]+' '+pos3[5:]\n\t\t\t\t\t\tdeletes.append(joini)\n\t\t\t\t\t\tind=anotador.index(joini)\n\t\t\t\t\t\t#anotador.pop(ind)\n\t\t\t\t\t\tcont=cont+1\n\n\tfor i in deletes:\n\t\tif(i in anotador):\n\t\t\tind=anotador.index(i)\n\t\t\tanotador.pop(ind)\n\t\t\t\n\t\n\telapsed_time=time()-start_time\n\ttxt='PATRONES, DELETE'+' ('+str(cont)+') NEW LIST SIZE: ('+str(len(anotador))+') TIME: ('+str(elapsed_time)+')'\n\tjoind=', '.join(deletes)\n\tprint('PATRONES DELETE', cont, len(anotador), elapsed_time)\n\tconts_log.information(txt, 'TERMS REMOVED: '+joind)\n\treturn(anotador)\n\n\n\n\n# 3 plurales\ndef quit_plural(valuelist):\n\tstart_time=time()\n\tfile=open('./data/numberlist_es', 'r', encoding='utf-8')\n\tread=file.readlines()\n\tplural=[]\n\tcont=0\n\tfor i in valuelist:\n\t\tind=valuelist.index(i)\n\t\tterm=i.replace(',', '').replace('-', ' ')\n\t\tvaluelist[ind]=term\n\t\tplu=''\n\t\tif('es' in term[-2:] or 's' in term[-1:]):\n\t\t\tslp=term.split(' ')\n\n\t\t\tfor n in read:\n\t\t\t\tif(n[:-1] in slp):\n\t\t\t\t\tplu=i\n\n\t\t\tif not len(plu):\n\t\t\t\tfor j in slp:\n\t\t\t\t\tif( ('es' in j[-2:] ) and 't' not in j[-3:-2] and 'l' not in j[-3:-2] or ('les' in j[-3:] ) ):\n\t\t\t\t\t\tplu+=' '+j[:-2]\n\t\t\t\t\t\t\n\t\t\t\t\t\tif('on' in plu[-2:]):\n\t\t\t\t\t\t\tplu=' '+plu[:-2]+'ón'\n\t\t\t\t\t\tif('v' in plu[-1:]):\n\t\t\t\t\t\t\tplu=' '+plu+'e'\n\t\t\t\t\t\tif('bl' in plu[-2:]):\n\t\t\t\t\t\t\tplu=' '+plu+'e'\n\t\t\t\t\t\tif('br' in plu[-2:]):\n\t\t\t\t\t\t\tplu=' '+plu+'e'\n\n\t\t\t\t\telif(('s' in j[-1:]) ):\n\t\t\t\t\t\tplu+=' '+j[:-1]\n\t\t\t\t\t\tpos=slp.index(j)\n\t\t\t\t\t\t\n\t\t\t\t\t\tif(pos>0):\n\t\t\t\t\t\t\tbef=slp[0]\n\t\t\t\t\t\t\tif('n' in bef[-1:] and 'ón' not in bef[-2:]):\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\tsplb=plu.split(' ')\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\tfirts=splb[1]\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\tif('n' not in firts[-1:]):\n\t\t\t\t\t\t\t\t\tpass\n\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\tplu0=firts[:-1]\n\t\t\t\t\t\t\t\t\tjoin1=' '.join(splb[2:])\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\tplu=plu0+' '+join1\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\n\t\t\t\t\telse:\n\t\t\t\t\t\tplu+=' '+j\n\n\t\t\tind=valuelist.index(term)\n\t\t\tvaluelist[ind]=plu.strip()\t\t\t\n\t\t\tcont=cont+1\n\tquit_plu=[]\n\tnuevalista=set(valuelist)\n\tfor i in nuevalista:\n\t\tquit_plu.append(i)\t\n\n\tdeletes = []\n\tnew=[]\n\tfor i in valuelist:\n\t if i not in new:\n\t new.append(i)\n\t else:\n\t \tdeletes.append(i)\n\t#print('plurañes eliminadas ->', deletes)\n\telapsed_time=time()-start_time\n\ttxt='PLURAL, DELETE'+' 
('+str(len(valuelist)-len(quit_plu))+') NEW LIST SIZE: ('+str(len(quit_plu))+') TIME: ('+str(elapsed_time)+')'\n\tjoind=', '.join(deletes)\n\tprint('PLURALES DELETE', len(valuelist)-len(quit_plu), len(quit_plu), elapsed_time)\n\tconts_log.information(txt, 'TERMS REMOVED: '+joind)\n\treturn(quit_plu)\n\n# 4 numeros\ndef delete_numbers(list_):\n\tstart_time=time()\n\tfile=open('./data/numberlist_es', 'r', encoding='utf-8')\n\tread=file.readlines()\n\tcont=0\n\tdeletes=[]\n\tfor i in read:\n\t\tif(i[-1:]=='\\n'):\n\t\t\ti=i[:-1]\n\t\t\tfor j in list_:\n\t\t\t\tif(' '+i+' ' in ' '+j+' ' ):\n\t\t\t\t\tdeletes.append(j)\n\t\t\t\t\tind=list_.index(j)\n\t\t\t\t\tcont=cont+1\n\t\t\t\t\tlist_.pop(ind)\n\t#list_.sort()\n\telapsed_time=time()-start_time\n\ttxt='NUMBERS, DELETE'+' ('+str(cont)+') NEW LIST SIZE: ('+str(len(list_))+') TIME: ('+str(elapsed_time)+')'\n\tjoind=', '.join(deletes)\n\tprint('NUMEROS DELETE', cont, len(list_), elapsed_time)\n\tconts_log.information(txt, 'TERMS REMOVED: '+joind)\n\treturn(list_)\n\n\n# 5 leer archivo \ndef readFile(read):\n\tstart_time=time()\n\ttext=''\n\tfor i in read:\n\t\tif(i[-1:]=='\\n'):\n\t\t\tspl=i[:-1].split('\\t')\n\t\telse:\n\t\t\tspl=i.split('\\t')\n\t\tterm=spl[1].replace('-', '').replace(',', '').replace(';', '')\n\t\tspl2=term.split(' ')\n\t\ttext+='| '+spl[1]\n\telapsed_time=time()-start_time\n\treturn text\n\n#elimina tildes\ndef quit_tilds(s):\n replacements = (\n (\"á\", \"a\"),\n (\"é\", \"e\"),\n (\"í\", \"i\"),\n (\"ó\", \"o\"),\n (\"ú\", \"u\"),\n )\n for a, b in replacements:\n s = s.replace(a, b)\n return s\n\ndef acentos(last):\n\tstart_time=time()\n\ttil=[]\n\tlist_acentos=[]\n\tfor i in last:\n\t\tacento=re.search(\"[áéíóúÁÉÍÓÚ]+\", i)\n\t\tif(acento!=None):\n\t\t\tsin=quit_tilds(i)\n\t\t\tlist_acentos.append(i)\n\t\t\ttil.append(sin)\n\t\telse:\n\t\t\ttil.append(i)\n\n\ttil2 = []\n\tdelete=[]\n\tfor i in til:\n\t\tif i not in til2:\n\t\t\ttil2.append(i)\n\t\telse:\n\t\t\tdelete.append(i)\n\n\tindices=[]\n\tdelete2=[]\n\tfor i in last:\n\t\tif(i in delete and i not in indices):\n\t\t\tindices.append(i)\n\t\t\tdelete2.append(i)\n\tfor i in delete2:\n\t\tind=last.index(i)\n\t\tlast.pop(ind)\n\n\tlast.sort()\n\telapsed_time=time()-start_time\n\t\n\treturn(last)\n\n\n#-------MAIN-------#\ndef main(read, lang_in):\n\tstart_time=time()\n\ttext=readFile(read)\n\tdate='2020-06-03'\n\tlang=lang_in\n\ttermlist=text.split('| ')\n\tprint('RECIBE', termlist)\n\tclean_text=clean_terms(termlist, lang_in)\n\tjoin_clean_text='| '.join(clean_text).replace('-', '').replace(',', '').replace(';', '')\n\tanotador=annotate_timex(join_clean_text, date, lang)\n\tanotador.sort()\n\tif(lang_in=='es'):\n\t\tpattern=delete_pattern(anotador)\n\t\tplural=quit_plural(pattern)\n\n\t\n\t\n\t\n\t\n\tnumbers=delete_numbers(plural)\n\n\ttildes=acentos(numbers)\n\tstop2=clean_terms(tildes, lang_in)\n\tprint('FINALES', stop2)\n\t'''new=open('../data/clean_terms_freq4.txt', 'w')#se imprime lo que se queda\n\n\tfor i in stop2:\n\t new.write(i+'\\n')\n\tnew.close()\n\telapsed_time=time()-start_time\n\tprint('Main', elapsed_time)\n\treturn(stop2)'''\n\n\n#file=open('../data/estatuto_es.txt', 'r', encoding='utf-8')\n#read=file.readlines()\n#main(read)\n",
"step-ids": [
8,
9,
10,
13,
14
]
}
|
[
8,
9,
10,
13,
14
] |
import pandas as pd
import random
import math
# takes 2 row Series and calculates the Euclidean distance between them
def euclidean_dist(a: pd.Series, b: pd.Series):
diff = a.sub(other=b)
squares = diff ** 2
dist = 0
for feature_distance in squares:
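        # skip features that are NaN (missing in either row) so they don't poison the sum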
if not math.isnan(feature_distance):
dist += feature_distance
return math.sqrt(dist)
# takes copy of dataframe; returns initialized centroid array
def choose_centroids(data_copy: pd.DataFrame):
new_centroids = []
    # picks k initial centroids: the first at random, the rest as the point furthest from the existing centroids
for i in range(0, k):
distance_scores = []
        # after the first pick, chooses the point furthest from the existing centroids; the first centroid is a random point
if i != 0:
for j in new_centroids:
distances = []
                # for each existing centroid j, measure the distance to every remaining point; the overall furthest point becomes the next centroid
for row in data_copy.iterrows():
distances.append((euclidean_dist(j, row[1]), row[0]))
distances.sort()
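                # tuples sort by distance first, so the last element is the point furthest from centroid j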
distance_scores.append(distances[-1])
distance_scores.sort()
centroid_index = distance_scores[-1][1]
else:
centroid_index = random.randrange(num_rows)
# drops centroid from copied dataframe to avoid duplicates
data_copy.drop(labels=centroid_index, axis=0, inplace=True)
# appends the newly selected centroid to the list
new_centroids.append(data.iloc[centroid_index])
return new_centroids
def assign_centroids():
cluster_ids = [] # array for storing column output
    cluster_dict = {}  # dict for mapping centroid labels (e.g. 89, 102, 34, etc.) to cluster IDs (0, 1, ..., k - 1)
counter = 0
for i in centroids:
if i.name is None:
i.name = counter
cluster_dict[i.name] = counter
counter += 1 # crude way of assigning centroid IDs
for row in data.iterrows():
distances = []
for j in centroids:
dist = euclidean_dist(row[1], j)
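            # a distance of exactly 0 means the row coincides with a centroid (e.g. the initial data-point centroids), so it is excluded here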
if dist != 0:
distances.append((dist, j.name))
distances.sort()
cluster_ids.append(cluster_dict[distances[0][1]])
# inserts cluster assignment column;
# if column already exists, catches exception and removes the column before insertion
try:
data.insert(6, "ClusterID", cluster_ids)
except ValueError:
data.drop(columns="ClusterID", axis=1, inplace=True)
data.insert(6, "ClusterID", cluster_ids)
except IndexError:
data.drop(columns="ClusterID", axis=1, inplace=True)
data.insert(6, "ClusterID", cluster_ids)
return cluster_ids
def recalculate_clusters():
    # for each of the k centroids, take the mean of all points assigned to it and make that mean the new centroid
for i in range(0, k):
cluster = pd.DataFrame()
for item in data.iterrows():
if item[1].loc['ClusterID'] == i:
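                # note: DataFrame.append was removed in pandas 2.0; on newer pandas use pd.concat instead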
cluster = cluster.append(other=item[1])
centroids[i] = cluster.mean()
data = pd.read_csv("data/fire_data_2011.csv")
# uses a dict to convert tree genus strings (e.g. "Pinu", "Pice", ...) to integer codes 0, 1, ...
counter = 0
tree_count_dict = {}
for i in data.iterrows():
try:
tree_count_dict[i[1]["tree_genus"]]
except KeyError:
tree_count_dict[i[1]["tree_genus"]] = counter
counter += 1
data = data.copy().replace(to_replace=tree_count_dict)
print(data)
k = 7
num_rows = data.iloc[-1].name  # label of the last row; with the default RangeIndex this is one less than the instance count
# pass a temporary copy of the data so selected rows can be removed, preventing duplicate centroids
centroids = choose_centroids(data.copy())
cluster_assignments = []
unchanged_iteration_count = 0
for iterations in range(0, 100):
print("Clustering Progress: [", iterations + 1, "/ 100 ]")
# update previous cluster assignments; reassign cluster IDs and recalculate centroids
previous_assignments = cluster_assignments.copy()
cluster_assignments = assign_centroids()
recalculate_clusters()
# checks if cluster assignments have changed from one iteration to another
if previous_assignments == cluster_assignments and len(previous_assignments) > 0:
unchanged_iteration_count += 1
else:
unchanged_iteration_count = 0
# if cluster assignments haven't changed in 3 iterations, break from loop and exit
if unchanged_iteration_count > 3:
print("Exiting early: cluster assignments haven't changed in 3 iterations")
break
print("\nCluster Counts ( k =", k, "):")
for i in range(0, k):
print("Cluster", i + 1, ": ", cluster_assignments.count(i))
print("\n\n", data)
data.to_csv("./data/fire_data_2011_clustered.csv")
|
normal
|
{
"blob_id": "46b51f46f6ed73e3b9dc2f759535ba71facd2aae",
"index": 5712,
"step-1": "<mask token>\n\n\ndef euclidean_dist(a: pd.Series, b: pd.Series):\n diff = a.sub(other=b)\n squares = diff ** 2\n dist = 0\n for feature_distance in squares:\n if not math.isnan(feature_distance):\n dist += feature_distance\n return math.sqrt(dist)\n\n\ndef choose_centroids(data_copy: pd.DataFrame):\n new_centroids = []\n for i in range(0, k):\n distance_scores = []\n if i != 0:\n for j in new_centroids:\n distances = []\n for row in data_copy.iterrows():\n distances.append((euclidean_dist(j, row[1]), row[0]))\n distances.sort()\n distance_scores.append(distances[-1])\n distance_scores.sort()\n centroid_index = distance_scores[-1][1]\n else:\n centroid_index = random.randrange(num_rows)\n data_copy.drop(labels=centroid_index, axis=0, inplace=True)\n new_centroids.append(data.iloc[centroid_index])\n return new_centroids\n\n\ndef assign_centroids():\n cluster_ids = []\n cluster_dict = {}\n counter = 0\n for i in centroids:\n if i.name is None:\n i.name = counter\n cluster_dict[i.name] = counter\n counter += 1\n for row in data.iterrows():\n distances = []\n for j in centroids:\n dist = euclidean_dist(row[1], j)\n if dist != 0:\n distances.append((dist, j.name))\n distances.sort()\n cluster_ids.append(cluster_dict[distances[0][1]])\n try:\n data.insert(6, 'ClusterID', cluster_ids)\n except ValueError:\n data.drop(columns='ClusterID', axis=1, inplace=True)\n data.insert(6, 'ClusterID', cluster_ids)\n except IndexError:\n data.drop(columns='ClusterID', axis=1, inplace=True)\n data.insert(6, 'ClusterID', cluster_ids)\n return cluster_ids\n\n\ndef recalculate_clusters():\n for i in range(0, k):\n cluster = pd.DataFrame()\n for item in data.iterrows():\n if item[1].loc['ClusterID'] == i:\n cluster = cluster.append(other=item[1])\n centroids[i] = cluster.mean()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef euclidean_dist(a: pd.Series, b: pd.Series):\n diff = a.sub(other=b)\n squares = diff ** 2\n dist = 0\n for feature_distance in squares:\n if not math.isnan(feature_distance):\n dist += feature_distance\n return math.sqrt(dist)\n\n\ndef choose_centroids(data_copy: pd.DataFrame):\n new_centroids = []\n for i in range(0, k):\n distance_scores = []\n if i != 0:\n for j in new_centroids:\n distances = []\n for row in data_copy.iterrows():\n distances.append((euclidean_dist(j, row[1]), row[0]))\n distances.sort()\n distance_scores.append(distances[-1])\n distance_scores.sort()\n centroid_index = distance_scores[-1][1]\n else:\n centroid_index = random.randrange(num_rows)\n data_copy.drop(labels=centroid_index, axis=0, inplace=True)\n new_centroids.append(data.iloc[centroid_index])\n return new_centroids\n\n\ndef assign_centroids():\n cluster_ids = []\n cluster_dict = {}\n counter = 0\n for i in centroids:\n if i.name is None:\n i.name = counter\n cluster_dict[i.name] = counter\n counter += 1\n for row in data.iterrows():\n distances = []\n for j in centroids:\n dist = euclidean_dist(row[1], j)\n if dist != 0:\n distances.append((dist, j.name))\n distances.sort()\n cluster_ids.append(cluster_dict[distances[0][1]])\n try:\n data.insert(6, 'ClusterID', cluster_ids)\n except ValueError:\n data.drop(columns='ClusterID', axis=1, inplace=True)\n data.insert(6, 'ClusterID', cluster_ids)\n except IndexError:\n data.drop(columns='ClusterID', axis=1, inplace=True)\n data.insert(6, 'ClusterID', cluster_ids)\n return cluster_ids\n\n\ndef recalculate_clusters():\n for i in range(0, k):\n cluster = pd.DataFrame()\n for item in data.iterrows():\n if item[1].loc['ClusterID'] == i:\n cluster = cluster.append(other=item[1])\n centroids[i] = cluster.mean()\n\n\n<mask token>\nfor i in data.iterrows():\n try:\n tree_count_dict[i[1]['tree_genus']]\n except KeyError:\n tree_count_dict[i[1]['tree_genus']] = counter\n counter += 1\n<mask token>\nprint(data)\n<mask token>\nfor iterations in range(0, 100):\n print('Clustering Progress: [', iterations + 1, '/ 100 ]')\n previous_assignments = cluster_assignments.copy()\n cluster_assignments = assign_centroids()\n recalculate_clusters()\n if previous_assignments == cluster_assignments and len(previous_assignments\n ) > 0:\n unchanged_iteration_count += 1\n else:\n unchanged_iteration_count = 0\n if unchanged_iteration_count > 3:\n print(\n \"Exiting early: cluster assignments haven't changed in 3 iterations\"\n )\n break\nprint(\"\"\"\nCluster Counts ( k =\"\"\", k, '):')\nfor i in range(0, k):\n print('Cluster', i + 1, ': ', cluster_assignments.count(i))\nprint('\\n\\n', data)\ndata.to_csv('./data/fire_data_2011_clustered.csv')\n",
"step-3": "<mask token>\n\n\ndef euclidean_dist(a: pd.Series, b: pd.Series):\n diff = a.sub(other=b)\n squares = diff ** 2\n dist = 0\n for feature_distance in squares:\n if not math.isnan(feature_distance):\n dist += feature_distance\n return math.sqrt(dist)\n\n\ndef choose_centroids(data_copy: pd.DataFrame):\n new_centroids = []\n for i in range(0, k):\n distance_scores = []\n if i != 0:\n for j in new_centroids:\n distances = []\n for row in data_copy.iterrows():\n distances.append((euclidean_dist(j, row[1]), row[0]))\n distances.sort()\n distance_scores.append(distances[-1])\n distance_scores.sort()\n centroid_index = distance_scores[-1][1]\n else:\n centroid_index = random.randrange(num_rows)\n data_copy.drop(labels=centroid_index, axis=0, inplace=True)\n new_centroids.append(data.iloc[centroid_index])\n return new_centroids\n\n\ndef assign_centroids():\n cluster_ids = []\n cluster_dict = {}\n counter = 0\n for i in centroids:\n if i.name is None:\n i.name = counter\n cluster_dict[i.name] = counter\n counter += 1\n for row in data.iterrows():\n distances = []\n for j in centroids:\n dist = euclidean_dist(row[1], j)\n if dist != 0:\n distances.append((dist, j.name))\n distances.sort()\n cluster_ids.append(cluster_dict[distances[0][1]])\n try:\n data.insert(6, 'ClusterID', cluster_ids)\n except ValueError:\n data.drop(columns='ClusterID', axis=1, inplace=True)\n data.insert(6, 'ClusterID', cluster_ids)\n except IndexError:\n data.drop(columns='ClusterID', axis=1, inplace=True)\n data.insert(6, 'ClusterID', cluster_ids)\n return cluster_ids\n\n\ndef recalculate_clusters():\n for i in range(0, k):\n cluster = pd.DataFrame()\n for item in data.iterrows():\n if item[1].loc['ClusterID'] == i:\n cluster = cluster.append(other=item[1])\n centroids[i] = cluster.mean()\n\n\ndata = pd.read_csv('data/fire_data_2011.csv')\ncounter = 0\ntree_count_dict = {}\nfor i in data.iterrows():\n try:\n tree_count_dict[i[1]['tree_genus']]\n except KeyError:\n tree_count_dict[i[1]['tree_genus']] = counter\n counter += 1\ndata = data.copy().replace(to_replace=tree_count_dict)\nprint(data)\nk = 7\nnum_rows = data.iloc[-1].name\ncentroids = choose_centroids(data.copy())\ncluster_assignments = []\nunchanged_iteration_count = 0\nfor iterations in range(0, 100):\n print('Clustering Progress: [', iterations + 1, '/ 100 ]')\n previous_assignments = cluster_assignments.copy()\n cluster_assignments = assign_centroids()\n recalculate_clusters()\n if previous_assignments == cluster_assignments and len(previous_assignments\n ) > 0:\n unchanged_iteration_count += 1\n else:\n unchanged_iteration_count = 0\n if unchanged_iteration_count > 3:\n print(\n \"Exiting early: cluster assignments haven't changed in 3 iterations\"\n )\n break\nprint(\"\"\"\nCluster Counts ( k =\"\"\", k, '):')\nfor i in range(0, k):\n print('Cluster', i + 1, ': ', cluster_assignments.count(i))\nprint('\\n\\n', data)\ndata.to_csv('./data/fire_data_2011_clustered.csv')\n",
"step-4": "import pandas as pd\nimport random\nimport math\n\n\ndef euclidean_dist(a: pd.Series, b: pd.Series):\n diff = a.sub(other=b)\n squares = diff ** 2\n dist = 0\n for feature_distance in squares:\n if not math.isnan(feature_distance):\n dist += feature_distance\n return math.sqrt(dist)\n\n\ndef choose_centroids(data_copy: pd.DataFrame):\n new_centroids = []\n for i in range(0, k):\n distance_scores = []\n if i != 0:\n for j in new_centroids:\n distances = []\n for row in data_copy.iterrows():\n distances.append((euclidean_dist(j, row[1]), row[0]))\n distances.sort()\n distance_scores.append(distances[-1])\n distance_scores.sort()\n centroid_index = distance_scores[-1][1]\n else:\n centroid_index = random.randrange(num_rows)\n data_copy.drop(labels=centroid_index, axis=0, inplace=True)\n new_centroids.append(data.iloc[centroid_index])\n return new_centroids\n\n\ndef assign_centroids():\n cluster_ids = []\n cluster_dict = {}\n counter = 0\n for i in centroids:\n if i.name is None:\n i.name = counter\n cluster_dict[i.name] = counter\n counter += 1\n for row in data.iterrows():\n distances = []\n for j in centroids:\n dist = euclidean_dist(row[1], j)\n if dist != 0:\n distances.append((dist, j.name))\n distances.sort()\n cluster_ids.append(cluster_dict[distances[0][1]])\n try:\n data.insert(6, 'ClusterID', cluster_ids)\n except ValueError:\n data.drop(columns='ClusterID', axis=1, inplace=True)\n data.insert(6, 'ClusterID', cluster_ids)\n except IndexError:\n data.drop(columns='ClusterID', axis=1, inplace=True)\n data.insert(6, 'ClusterID', cluster_ids)\n return cluster_ids\n\n\ndef recalculate_clusters():\n for i in range(0, k):\n cluster = pd.DataFrame()\n for item in data.iterrows():\n if item[1].loc['ClusterID'] == i:\n cluster = cluster.append(other=item[1])\n centroids[i] = cluster.mean()\n\n\ndata = pd.read_csv('data/fire_data_2011.csv')\ncounter = 0\ntree_count_dict = {}\nfor i in data.iterrows():\n try:\n tree_count_dict[i[1]['tree_genus']]\n except KeyError:\n tree_count_dict[i[1]['tree_genus']] = counter\n counter += 1\ndata = data.copy().replace(to_replace=tree_count_dict)\nprint(data)\nk = 7\nnum_rows = data.iloc[-1].name\ncentroids = choose_centroids(data.copy())\ncluster_assignments = []\nunchanged_iteration_count = 0\nfor iterations in range(0, 100):\n print('Clustering Progress: [', iterations + 1, '/ 100 ]')\n previous_assignments = cluster_assignments.copy()\n cluster_assignments = assign_centroids()\n recalculate_clusters()\n if previous_assignments == cluster_assignments and len(previous_assignments\n ) > 0:\n unchanged_iteration_count += 1\n else:\n unchanged_iteration_count = 0\n if unchanged_iteration_count > 3:\n print(\n \"Exiting early: cluster assignments haven't changed in 3 iterations\"\n )\n break\nprint(\"\"\"\nCluster Counts ( k =\"\"\", k, '):')\nfor i in range(0, k):\n print('Cluster', i + 1, ': ', cluster_assignments.count(i))\nprint('\\n\\n', data)\ndata.to_csv('./data/fire_data_2011_clustered.csv')\n",
"step-5": "import pandas as pd\nimport random\nimport math\n\n\n# takes 2 row series and calculates the distances between them\ndef euclidean_dist(a: pd.Series, b: pd.Series):\n diff = a.sub(other=b)\n squares = diff ** 2\n dist = 0\n\n for feature_distance in squares:\n if not math.isnan(feature_distance):\n dist += feature_distance\n\n return math.sqrt(dist)\n\n\n# takes copy of dataframe; returns initialized centroid array\ndef choose_centroids(data_copy: pd.DataFrame):\n new_centroids = []\n\n # randomly picks k centroids\n for i in range(0, k):\n distance_scores = []\n\n # picks furthest centroid from each other if the first one has been picked; else picks a random initial point\n if i != 0:\n for j in new_centroids:\n distances = []\n\n # for j existing centroids, compare to all other points and selects from all of j for next centroid\n for row in data_copy.iterrows():\n distances.append((euclidean_dist(j, row[1]), row[0]))\n\n distances.sort()\n distance_scores.append(distances[-1])\n\n distance_scores.sort()\n centroid_index = distance_scores[-1][1]\n\n else:\n centroid_index = random.randrange(num_rows)\n\n # drops centroid from copied dataframe to avoid duplicates\n data_copy.drop(labels=centroid_index, axis=0, inplace=True)\n\n # appends the newly selected centroid to the list\n new_centroids.append(data.iloc[centroid_index])\n\n return new_centroids\n\n\ndef assign_centroids():\n cluster_ids = [] # array for storing column output\n cluster_dict = {} # dict for mapping centroid IDs (i.e. 89, 102, 34, etc.) to (0, 1, 2, ..., k)\n counter = 0\n\n for i in centroids:\n if i.name is None:\n i.name = counter\n cluster_dict[i.name] = counter\n counter += 1 # crude way of assigning centroid IDs\n\n for row in data.iterrows():\n distances = []\n\n for j in centroids:\n dist = euclidean_dist(row[1], j)\n if dist != 0:\n distances.append((dist, j.name))\n\n distances.sort()\n cluster_ids.append(cluster_dict[distances[0][1]])\n\n # inserts cluster assignment column;\n # if column already exists, catches exception and removes the column before insertion\n try:\n data.insert(6, \"ClusterID\", cluster_ids)\n except ValueError:\n data.drop(columns=\"ClusterID\", axis=1, inplace=True)\n data.insert(6, \"ClusterID\", cluster_ids)\n except IndexError:\n data.drop(columns=\"ClusterID\", axis=1, inplace=True)\n data.insert(6, \"ClusterID\", cluster_ids)\n return cluster_ids\n\n\ndef recalculate_clusters():\n # for k centroids, take the mean of all values belonging to the centroid and make that point the new centroid\n for i in range(0, k):\n cluster = pd.DataFrame()\n for item in data.iterrows():\n if item[1].loc['ClusterID'] == i:\n cluster = cluster.append(other=item[1])\n centroids[i] = cluster.mean()\n\n\ndata = pd.read_csv(\"data/fire_data_2011.csv\")\n\n# uses a dict to convert from tree genus i.e. \"Pinu\", \"Pice\",... 
to 0, 1,...\ncounter = 0\ntree_count_dict = {}\nfor i in data.iterrows():\n try:\n tree_count_dict[i[1][\"tree_genus\"]]\n except KeyError:\n tree_count_dict[i[1][\"tree_genus\"]] = counter\n counter += 1\n\ndata = data.copy().replace(to_replace=tree_count_dict)\nprint(data)\n\nk = 7\nnum_rows = data.iloc[-1].name # gets label of the last row to figure out how many instances are in the data\n\n# giving temporary copy of data so selected values can be removed so there aren't duplicate centroids\ncentroids = choose_centroids(data.copy())\n\ncluster_assignments = []\nunchanged_iteration_count = 0\n\nfor iterations in range(0, 100):\n print(\"Clustering Progress: [\", iterations + 1, \"/ 100 ]\")\n\n # update previous cluster assignments; reassign cluster IDs and recalculate centroids\n previous_assignments = cluster_assignments.copy()\n cluster_assignments = assign_centroids()\n recalculate_clusters()\n\n # checks if cluster assignments have changed from one iteration to another\n if previous_assignments == cluster_assignments and len(previous_assignments) > 0:\n unchanged_iteration_count += 1\n else:\n unchanged_iteration_count = 0\n\n # if cluster assignments haven't changed in 3 iterations, break from loop and exit\n if unchanged_iteration_count > 3:\n print(\"Exiting early: cluster assignments haven't changed in 3 iterations\")\n break\n\nprint(\"\\nCluster Counts ( k =\", k, \"):\")\nfor i in range(0, k):\n print(\"Cluster\", i + 1, \": \", cluster_assignments.count(i))\n\nprint(\"\\n\\n\", data)\n\ndata.to_csv(\"./data/fire_data_2011_clustered.csv\")\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<mask token>
class _NodesTree(object):
<mask token>
<mask token>
<mask token>
def __hasCycle(self, node, new_deps):
if node in new_deps:
return True
deps = set(new_deps)
node2deps = self.node2deps
while deps:
dep = deps.pop()
dep_deps = node2deps[dep]
if node in dep_deps:
return True
deps |= dep_deps
return False
def __depends(self, node, deps):
node2deps = self.node2deps
dep2nodes = self.dep2nodes
try:
current_node_deps = node2deps[node]
deps = {dep for dep in deps if not dep.isBuilt()}
new_deps = deps - current_node_deps
if not new_deps:
return
if self.__hasCycle(node, new_deps):
raise ErrorNodeDependencyCyclic(node, new_deps)
self.tail_nodes.discard(node)
current_node_deps.update(new_deps)
for dep in new_deps:
dep2nodes[dep].add(node)
except KeyError as dep_node:
raise ErrorNodeDependencyUnknown(node, dep_node.args[0])
def __add(self, nodes):
for node in nodes:
if node not in self.node2deps:
self.node2deps[node] = set()
self.dep2nodes[node] = set()
self.tail_nodes.add(node)
node_srcnodes = node.getSourceNodes()
node_depnodes = node.getDepNodes()
self.__add(node_srcnodes)
self.__add(node_depnodes)
self.__depends(node, node_srcnodes)
self.__depends(node, node_depnodes)
<mask token>
def depends(self, node, deps):
deps = toSequence(deps)
self.__add(deps)
self.__depends(node, deps)
def removeTail(self, node):
node2deps = self.node2deps
try:
deps = node2deps.pop(node)
if deps:
raise InternalErrorRemoveNonTailNode(node)
except KeyError as node:
raise InternalErrorRemoveUnknownTailNode(node.args[0])
tail_nodes = self.tail_nodes
for dep in self.dep2nodes.pop(node):
d = node2deps[dep]
d.remove(node)
if not d:
tail_nodes.add(dep)
<mask token>
<mask token>
def shrinkTo(self, nodes):
node2deps = self.node2deps
dep2nodes = self.dep2nodes
ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)
self.tail_nodes -= ignore_nodes
for node in ignore_nodes:
del node2deps[node]
del dep2nodes[node]
for dep_nodes in dep2nodes.values():
dep_nodes.difference_update(ignore_nodes)
def selfTest(self):
if set(self.node2deps) != set(self.dep2nodes):
raise AssertionError('Not all deps are added')
all_dep_nodes = set()
for node in self.dep2nodes:
if node not in self.node2deps:
raise AssertionError('Missed node: %s' % (node,))
node_deps = self.node2deps[node]
if not node_deps:
if node not in self.tail_nodes:
raise AssertionError(
'Missed tail node: %s, tail_nodes: %s' % (node,
self.tail_nodes))
elif node in self.tail_nodes:
raise AssertionError('Invalid tail node: %s' % (node,))
all_dep_nodes |= node_deps
for dep in node_deps:
if node not in self.dep2nodes[dep]:
raise AssertionError(
'node not in self.dep2nodes[dep]: dep: %s, node: %s' %
(dep, node))
if all_dep_nodes - set(self.dep2nodes):
raise AssertionError('Not all deps are added')
class _VFiles(object):
__slots__ = 'names', 'handles'
def __init__(self):
self.handles = {}
self.names = {}
def __iter__(self):
raise TypeError()
def __getitem__(self, builder):
builder_name = builder.name
try:
vfilename = self.names[builder_name]
except KeyError:
vfilename = os.path.join(builder.getBuildDir(), '.aql.db')
self.names[builder_name] = vfilename
try:
return self.handles[vfilename]
except KeyError:
vfile = ValuesFile(vfilename)
self.handles[vfilename] = vfile
return vfile
def close(self):
for vfile in self.handles.values():
vfile.close()
self.handles.clear()
self.names.clear()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, backtrace):
self.close()
<mask token>
class _NodeState(object):
__slots__ = ('initialized', 'check_depends', 'check_replace',
'check_split', 'check_actual', 'split_nodes')
def __init__(self):
self.initialized = False
self.check_depends = True
self.check_replace = True
self.check_split = True
self.check_actual = True
self.split_nodes = None
def __str__(self):
return (
'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'
% (self.initialized, self.check_depends, self.check_replace,
self.check_split, self.check_actual, self.split_nodes))
class _NodesBuilder(object):
__slots__ = ('vfiles', 'build_manager', 'task_manager', 'node_states',
'building_nodes')
def __init__(self, build_manager, jobs=0, keep_going=False,
with_backtrace=True):
self.vfiles = _VFiles()
self.node_states = {}
self.building_nodes = {}
self.build_manager = build_manager
self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not
keep_going, with_backtrace=with_backtrace)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, backtrace):
self.close()
def _getNodeState(self, node):
try:
state = self.node_states[node]
except KeyError:
state = _NodeState()
self.node_states[node] = state
return state
def _removeNodeState(self, node):
try:
del self.node_states[node]
except KeyError:
pass
def _addBuildingNode(self, node, state):
conflicting_nodes = []
building_nodes = self.building_nodes
for name, signature in node.getNamesAndSignatures():
node_signature = node, signature
other_node, other_signature = building_nodes.setdefault(name,
node_signature)
if other_node is not node:
if other_signature != signature:
raise ErrorNodeSignatureDifferent(node)
conflicting_nodes.append(other_node)
if conflicting_nodes:
state.check_actual = True
self.build_manager.depends(node, conflicting_nodes)
return True
return False
def _removeBuildingNode(self, node):
building_nodes = self.building_nodes
for name in node.getNames():
del building_nodes[name]
def isBuilding(self):
return bool(self.building_nodes)
def _checkPrebuildDepends(self, node):
dep_nodes = node.buildDepends()
if dep_nodes:
self.build_manager.depends(node, dep_nodes)
return True
return False
def _checkPrebuildReplace(self, node):
if node.buildReplace():
new_node_sources = node.getSourceNodes()
if new_node_sources:
self.build_manager.depends(node, new_node_sources)
return True
return False
def _checkPrebuildSplit(self, node, state):
build_manager = self.build_manager
if state.check_split:
state.check_split = False
check_actual = True
if node.isBatch() and state.check_actual:
vfile = self.vfiles[node.builder]
actual = build_manager.isActualNode(node, vfile)
if actual:
self._removeNodeState(node)
build_manager.actualNode(node)
return True
check_actual = False
split_nodes = node.buildSplit()
if split_nodes:
state.split_nodes = split_nodes
for split_node in split_nodes:
split_state = self._getNodeState(split_node)
split_state.check_split = False
split_state.check_depends = False
split_state.check_replace = False
split_state.check_actual = check_actual
split_state.initialized = (split_node.builder is node.
builder)
self.build_manager.depends(node, split_nodes)
return True
elif state.split_nodes is not None:
if node.isBatch():
node._populateTargets()
else:
targets = []
for split_node in state.split_nodes:
targets += split_node.getTargetValues()
node.target_values = targets
self._removeNodeState(node)
self.build_manager.completedSplitNode(node)
return True
return False
def _prebuild(self, node, state):
if not state.initialized:
node.initiate()
state.initialized = True
if state.check_depends:
state.check_depends = False
if self._checkPrebuildDepends(node):
return True
if state.check_replace:
state.check_replace = False
if self._checkPrebuildReplace(node):
return True
if self._checkPrebuildSplit(node, state):
return True
return False
def build(self, nodes):
build_manager = self.build_manager
vfiles = self.vfiles
addTask = self.task_manager.addTask
tasks_check_period = 10
added_tasks = 0
changed = False
for node in nodes:
node_state = self._getNodeState(node)
if self._prebuild(node, node_state):
changed = True
continue
if self._addBuildingNode(node, node_state):
continue
if node_state.check_actual:
vfile = vfiles[node.builder]
actual = build_manager.isActualNode(node, vfile)
if actual:
self._removeNodeState(node)
self._removeBuildingNode(node)
build_manager.actualNode(node)
changed = True
continue
addTask(node, _buildNode, node)
added_tasks += 1
if added_tasks == tasks_check_period:
changed = self._getFinishedNodes(block=False) or changed
added_tasks = 0
self._getFinishedNodes(block=not changed)
def _getFinishedNodes(self, block=True):
finished_tasks = self.task_manager.finishedTasks(block=block)
vfiles = self.vfiles
build_manager = self.build_manager
for task in finished_tasks:
node = task.task_id
error = task.error
self._removeNodeState(node)
self._removeBuildingNode(node)
vfile = vfiles[node.builder]
if error is None:
node.save(vfile)
build_manager.completedNode(node, task.result)
else:
if node.isBatch():
node.save(vfile)
build_manager.failedNode(node, error)
return bool(finished_tasks)
def clear(self, nodes):
vfiles = self.vfiles
build_manager = self.build_manager
for node in nodes:
node_state = self._getNodeState(node)
node_state.check_actual = False
if self._prebuild(node, node_state):
continue
vfile = vfiles[node.builder]
node.clear(vfile)
build_manager.removedNode(node)
def status(self, nodes):
vfiles = self.vfiles
build_manager = self.build_manager
for node in nodes:
node_state = self._getNodeState(node)
node_state.check_actual = False
if self._prebuild(node, node_state):
continue
vfile = vfiles[node.builder]
if build_manager.isActualNode(node, vfile):
build_manager.actualNodeStatus(node)
else:
build_manager.outdatedNodeStatus(node)
def close(self):
try:
self.task_manager.stop()
self._getFinishedNodes(block=False)
finally:
self.vfiles.close()
class BuildManager(object):
__slots__ = ('_nodes', '_built_targets', '_failed_nodes',
'_built_node_names', 'completed', 'actual', 'explain')
def __init__(self):
self._nodes = _NodesTree()
self.__reset()
def __reset(self, build_always=False, explain=False):
self._built_targets = {}
self._failed_nodes = {}
self._built_node_names = set() if build_always else None
self.completed = 0
self.actual = 0
self.explain = explain
def add(self, nodes):
self._nodes.add(nodes)
def depends(self, node, deps):
self._nodes.depends(node, deps)
def __len__(self):
return len(self._nodes)
def selfTest(self):
self._nodes.selfTest()
def getTailNodes(self):
return self._nodes.popTails()
def actualNodeStatus(self, node):
eventNodeActual(node, self.getProgressStr())
self.actualNode(node)
def outdatedNodeStatus(self, node):
self._failed_nodes[node] = None
eventNodeOutdated(node, self.getProgressStr())
node.shrink()
def isActualNode(self, node, vfile):
return node.checkActual(vfile, self._built_node_names, self.explain)
def _addToBuiltNodeNames(self, node):
built_names = self._built_node_names
if built_names is not None:
built_names.update(node.getNames())
def completedSplitNode(self, node):
self._nodes.removeTail(node)
node.shrink()
def actualNode(self, node):
self._nodes.removeTail(node)
self.actual += 1
node.shrink()
def completedNode(self, node, builder_output):
self._checkAlreadyBuilt(node)
self._nodes.removeTail(node)
self._addToBuiltNodeNames(node)
self.completed += 1
eventNodeBuildingFinished(node, builder_output, self.getProgressStr())
node.shrink()
def failedNode(self, node, error):
self._failed_nodes[node] = error
eventNodeBuildingFailed(node, error)
def removedNode(self, node):
self._nodes.removeTail(node)
self.completed += 1
eventNodeRemoved(node, self.getProgressStr())
node.shrink()
def getProgressStr(self):
done = self.completed + self.actual
total = len(self._nodes) + done
processed = done + len(self._failed_nodes)
progress = '%s/%s' % (processed, total)
return progress
def close(self):
self._nodes = _NodesTree()
def _checkAlreadyBuilt(self, node):
values = node.getTargetValues()
built_targets = self._built_targets
for value in values:
value_sign = value.signature
other_value_sign = built_targets.setdefault(value.valueId(),
value_sign)
if other_value_sign != value_sign:
eventBuildTargetTwice(value, node)
def build(self, jobs, keep_going, nodes=None, build_always=False,
explain=False, with_backtrace=True):
self.__reset(build_always=build_always, explain=explain)
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self, jobs, keep_going, with_backtrace
) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails and not nodes_builder.isBuilding():
break
nodes_builder.build(tails)
return self.isOk()
def isOk(self):
return not bool(self._failed_nodes)
def failsCount(self):
return len(self._failed_nodes)
def printFails(self):
for node, error in self._failed_nodes.items():
eventFailedNode(node, error)
def printBuildState(self):
logInfo('Failed nodes: %s' % len(self._failed_nodes))
logInfo('Completed nodes: %s' % self.completed)
logInfo('Actual nodes: %s' % self.actual)
def printStatusState(self):
logInfo('Outdated nodes: %s' % len(self._failed_nodes))
logInfo('Actual nodes: %s' % self.actual)
def clear(self, nodes=None):
self.__reset()
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails:
break
nodes_builder.clear(tails)
def status(self, nodes=None, explain=False):
self.__reset(explain=explain)
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails:
break
nodes_builder.status(tails)
return self.isOk()
<|reserved_special_token_1|>
<mask token>
class InternalErrorRemoveNonTailNode(AqlException):
<mask token>
class InternalErrorRemoveUnknownTailNode(AqlException):
def __init__(self, node):
        msg = 'Remove unknown tail node: %s' % (node,)
super(InternalErrorRemoveUnknownTailNode, self).__init__(msg)
class BuildStat(object):
__slots__ = 'total', 'completed', 'failed'
def __init__(self, total):
self.total = total
self.completed = 0
self.failed = 0
def addTotal(self, count):
self.total += count
def incCompleted(self):
self.completed += 1
def incFailed(self):
self.failed += 1
def getProgressStr(self):
progress = '%s/%s' % (self.completed + self.failed, self.total)
return progress
class _NodesTree(object):
__slots__ = 'node2deps', 'dep2nodes', 'tail_nodes'
def __init__(self):
self.node2deps = {}
self.dep2nodes = {}
self.tail_nodes = set()
def __len__(self):
return len(self.node2deps)
def __hasCycle(self, node, new_deps):
if node in new_deps:
return True
deps = set(new_deps)
node2deps = self.node2deps
while deps:
dep = deps.pop()
dep_deps = node2deps[dep]
if node in dep_deps:
return True
deps |= dep_deps
return False
def __depends(self, node, deps):
node2deps = self.node2deps
dep2nodes = self.dep2nodes
try:
current_node_deps = node2deps[node]
deps = {dep for dep in deps if not dep.isBuilt()}
new_deps = deps - current_node_deps
if not new_deps:
return
if self.__hasCycle(node, new_deps):
raise ErrorNodeDependencyCyclic(node, new_deps)
self.tail_nodes.discard(node)
current_node_deps.update(new_deps)
for dep in new_deps:
dep2nodes[dep].add(node)
except KeyError as dep_node:
raise ErrorNodeDependencyUnknown(node, dep_node.args[0])
def __add(self, nodes):
for node in nodes:
if node not in self.node2deps:
self.node2deps[node] = set()
self.dep2nodes[node] = set()
self.tail_nodes.add(node)
node_srcnodes = node.getSourceNodes()
node_depnodes = node.getDepNodes()
self.__add(node_srcnodes)
self.__add(node_depnodes)
self.__depends(node, node_srcnodes)
self.__depends(node, node_depnodes)
def add(self, nodes):
self.__add(toSequence(nodes))
def depends(self, node, deps):
deps = toSequence(deps)
self.__add(deps)
self.__depends(node, deps)
def removeTail(self, node):
node2deps = self.node2deps
try:
deps = node2deps.pop(node)
if deps:
raise InternalErrorRemoveNonTailNode(node)
except KeyError as node:
raise InternalErrorRemoveUnknownTailNode(node.args[0])
tail_nodes = self.tail_nodes
for dep in self.dep2nodes.pop(node):
d = node2deps[dep]
d.remove(node)
if not d:
tail_nodes.add(dep)
def popTails(self):
tails = self.tail_nodes
self.tail_nodes = set()
return tails
def __getAllNodes(self, nodes):
nodes = set(toSequence(nodes))
all_nodes = set(nodes)
node2deps = self.node2deps
while nodes:
node = nodes.pop()
try:
deps = node2deps[node] - all_nodes
except KeyError as node:
raise ErrorNodeUnknown(node.args[0])
all_nodes.update(deps)
nodes.update(deps)
return all_nodes
def shrinkTo(self, nodes):
node2deps = self.node2deps
dep2nodes = self.dep2nodes
ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)
self.tail_nodes -= ignore_nodes
for node in ignore_nodes:
del node2deps[node]
del dep2nodes[node]
for dep_nodes in dep2nodes.values():
dep_nodes.difference_update(ignore_nodes)
def selfTest(self):
if set(self.node2deps) != set(self.dep2nodes):
raise AssertionError('Not all deps are added')
all_dep_nodes = set()
for node in self.dep2nodes:
if node not in self.node2deps:
raise AssertionError('Missed node: %s' % (node,))
node_deps = self.node2deps[node]
if not node_deps:
if node not in self.tail_nodes:
raise AssertionError(
'Missed tail node: %s, tail_nodes: %s' % (node,
self.tail_nodes))
elif node in self.tail_nodes:
raise AssertionError('Invalid tail node: %s' % (node,))
all_dep_nodes |= node_deps
for dep in node_deps:
if node not in self.dep2nodes[dep]:
raise AssertionError(
'node not in self.dep2nodes[dep]: dep: %s, node: %s' %
(dep, node))
if all_dep_nodes - set(self.dep2nodes):
raise AssertionError('Not all deps are added')
class _VFiles(object):
__slots__ = 'names', 'handles'
def __init__(self):
self.handles = {}
self.names = {}
def __iter__(self):
raise TypeError()
def __getitem__(self, builder):
builder_name = builder.name
try:
vfilename = self.names[builder_name]
except KeyError:
vfilename = os.path.join(builder.getBuildDir(), '.aql.db')
self.names[builder_name] = vfilename
try:
return self.handles[vfilename]
except KeyError:
vfile = ValuesFile(vfilename)
self.handles[vfilename] = vfile
return vfile
def close(self):
for vfile in self.handles.values():
vfile.close()
self.handles.clear()
self.names.clear()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, backtrace):
self.close()
<mask token>
class _NodeState(object):
__slots__ = ('initialized', 'check_depends', 'check_replace',
'check_split', 'check_actual', 'split_nodes')
def __init__(self):
self.initialized = False
self.check_depends = True
self.check_replace = True
self.check_split = True
self.check_actual = True
self.split_nodes = None
def __str__(self):
return (
'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'
% (self.initialized, self.check_depends, self.check_replace,
self.check_split, self.check_actual, self.split_nodes))
class _NodesBuilder(object):
__slots__ = ('vfiles', 'build_manager', 'task_manager', 'node_states',
'building_nodes')
def __init__(self, build_manager, jobs=0, keep_going=False,
with_backtrace=True):
self.vfiles = _VFiles()
self.node_states = {}
self.building_nodes = {}
self.build_manager = build_manager
self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not
keep_going, with_backtrace=with_backtrace)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, backtrace):
self.close()
def _getNodeState(self, node):
try:
state = self.node_states[node]
except KeyError:
state = _NodeState()
self.node_states[node] = state
return state
def _removeNodeState(self, node):
try:
del self.node_states[node]
except KeyError:
pass
def _addBuildingNode(self, node, state):
conflicting_nodes = []
building_nodes = self.building_nodes
for name, signature in node.getNamesAndSignatures():
node_signature = node, signature
other_node, other_signature = building_nodes.setdefault(name,
node_signature)
if other_node is not node:
if other_signature != signature:
raise ErrorNodeSignatureDifferent(node)
conflicting_nodes.append(other_node)
if conflicting_nodes:
state.check_actual = True
self.build_manager.depends(node, conflicting_nodes)
return True
return False
def _removeBuildingNode(self, node):
building_nodes = self.building_nodes
for name in node.getNames():
del building_nodes[name]
def isBuilding(self):
return bool(self.building_nodes)
def _checkPrebuildDepends(self, node):
dep_nodes = node.buildDepends()
if dep_nodes:
self.build_manager.depends(node, dep_nodes)
return True
return False
def _checkPrebuildReplace(self, node):
if node.buildReplace():
new_node_sources = node.getSourceNodes()
if new_node_sources:
self.build_manager.depends(node, new_node_sources)
return True
return False
def _checkPrebuildSplit(self, node, state):
build_manager = self.build_manager
if state.check_split:
state.check_split = False
check_actual = True
if node.isBatch() and state.check_actual:
vfile = self.vfiles[node.builder]
actual = build_manager.isActualNode(node, vfile)
if actual:
self._removeNodeState(node)
build_manager.actualNode(node)
return True
check_actual = False
split_nodes = node.buildSplit()
if split_nodes:
state.split_nodes = split_nodes
for split_node in split_nodes:
split_state = self._getNodeState(split_node)
split_state.check_split = False
split_state.check_depends = False
split_state.check_replace = False
split_state.check_actual = check_actual
split_state.initialized = (split_node.builder is node.
builder)
self.build_manager.depends(node, split_nodes)
return True
elif state.split_nodes is not None:
if node.isBatch():
node._populateTargets()
else:
targets = []
for split_node in state.split_nodes:
targets += split_node.getTargetValues()
node.target_values = targets
self._removeNodeState(node)
self.build_manager.completedSplitNode(node)
return True
return False
def _prebuild(self, node, state):
if not state.initialized:
node.initiate()
state.initialized = True
if state.check_depends:
state.check_depends = False
if self._checkPrebuildDepends(node):
return True
if state.check_replace:
state.check_replace = False
if self._checkPrebuildReplace(node):
return True
if self._checkPrebuildSplit(node, state):
return True
return False
def build(self, nodes):
build_manager = self.build_manager
vfiles = self.vfiles
addTask = self.task_manager.addTask
tasks_check_period = 10
added_tasks = 0
changed = False
for node in nodes:
node_state = self._getNodeState(node)
if self._prebuild(node, node_state):
changed = True
continue
if self._addBuildingNode(node, node_state):
continue
if node_state.check_actual:
vfile = vfiles[node.builder]
actual = build_manager.isActualNode(node, vfile)
if actual:
self._removeNodeState(node)
self._removeBuildingNode(node)
build_manager.actualNode(node)
changed = True
continue
addTask(node, _buildNode, node)
added_tasks += 1
if added_tasks == tasks_check_period:
changed = self._getFinishedNodes(block=False) or changed
added_tasks = 0
self._getFinishedNodes(block=not changed)
def _getFinishedNodes(self, block=True):
finished_tasks = self.task_manager.finishedTasks(block=block)
vfiles = self.vfiles
build_manager = self.build_manager
for task in finished_tasks:
node = task.task_id
error = task.error
self._removeNodeState(node)
self._removeBuildingNode(node)
vfile = vfiles[node.builder]
if error is None:
node.save(vfile)
build_manager.completedNode(node, task.result)
else:
if node.isBatch():
node.save(vfile)
build_manager.failedNode(node, error)
return bool(finished_tasks)
def clear(self, nodes):
vfiles = self.vfiles
build_manager = self.build_manager
for node in nodes:
node_state = self._getNodeState(node)
node_state.check_actual = False
if self._prebuild(node, node_state):
continue
vfile = vfiles[node.builder]
node.clear(vfile)
build_manager.removedNode(node)
def status(self, nodes):
vfiles = self.vfiles
build_manager = self.build_manager
for node in nodes:
node_state = self._getNodeState(node)
node_state.check_actual = False
if self._prebuild(node, node_state):
continue
vfile = vfiles[node.builder]
if build_manager.isActualNode(node, vfile):
build_manager.actualNodeStatus(node)
else:
build_manager.outdatedNodeStatus(node)
def close(self):
try:
self.task_manager.stop()
self._getFinishedNodes(block=False)
finally:
self.vfiles.close()
class BuildManager(object):
__slots__ = ('_nodes', '_built_targets', '_failed_nodes',
'_built_node_names', 'completed', 'actual', 'explain')
def __init__(self):
self._nodes = _NodesTree()
self.__reset()
def __reset(self, build_always=False, explain=False):
self._built_targets = {}
self._failed_nodes = {}
self._built_node_names = set() if build_always else None
self.completed = 0
self.actual = 0
self.explain = explain
def add(self, nodes):
self._nodes.add(nodes)
def depends(self, node, deps):
self._nodes.depends(node, deps)
def __len__(self):
return len(self._nodes)
def selfTest(self):
self._nodes.selfTest()
def getTailNodes(self):
return self._nodes.popTails()
def actualNodeStatus(self, node):
eventNodeActual(node, self.getProgressStr())
self.actualNode(node)
def outdatedNodeStatus(self, node):
self._failed_nodes[node] = None
eventNodeOutdated(node, self.getProgressStr())
node.shrink()
def isActualNode(self, node, vfile):
return node.checkActual(vfile, self._built_node_names, self.explain)
def _addToBuiltNodeNames(self, node):
built_names = self._built_node_names
if built_names is not None:
built_names.update(node.getNames())
def completedSplitNode(self, node):
self._nodes.removeTail(node)
node.shrink()
def actualNode(self, node):
self._nodes.removeTail(node)
self.actual += 1
node.shrink()
def completedNode(self, node, builder_output):
self._checkAlreadyBuilt(node)
self._nodes.removeTail(node)
self._addToBuiltNodeNames(node)
self.completed += 1
eventNodeBuildingFinished(node, builder_output, self.getProgressStr())
node.shrink()
def failedNode(self, node, error):
self._failed_nodes[node] = error
eventNodeBuildingFailed(node, error)
def removedNode(self, node):
self._nodes.removeTail(node)
self.completed += 1
eventNodeRemoved(node, self.getProgressStr())
node.shrink()
def getProgressStr(self):
done = self.completed + self.actual
total = len(self._nodes) + done
processed = done + len(self._failed_nodes)
progress = '%s/%s' % (processed, total)
return progress
def close(self):
self._nodes = _NodesTree()
def _checkAlreadyBuilt(self, node):
values = node.getTargetValues()
built_targets = self._built_targets
for value in values:
value_sign = value.signature
other_value_sign = built_targets.setdefault(value.valueId(),
value_sign)
if other_value_sign != value_sign:
eventBuildTargetTwice(value, node)
def build(self, jobs, keep_going, nodes=None, build_always=False,
explain=False, with_backtrace=True):
self.__reset(build_always=build_always, explain=explain)
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self, jobs, keep_going, with_backtrace
) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails and not nodes_builder.isBuilding():
break
nodes_builder.build(tails)
return self.isOk()
def isOk(self):
return not bool(self._failed_nodes)
def failsCount(self):
return len(self._failed_nodes)
def printFails(self):
for node, error in self._failed_nodes.items():
eventFailedNode(node, error)
def printBuildState(self):
logInfo('Failed nodes: %s' % len(self._failed_nodes))
logInfo('Completed nodes: %s' % self.completed)
logInfo('Actual nodes: %s' % self.actual)
def printStatusState(self):
logInfo('Outdated nodes: %s' % len(self._failed_nodes))
logInfo('Actual nodes: %s' % self.actual)
def clear(self, nodes=None):
self.__reset()
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails:
break
nodes_builder.clear(tails)
def status(self, nodes=None, explain=False):
self.__reset(explain=explain)
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails:
break
nodes_builder.status(tails)
return self.isOk()
<|reserved_special_token_1|>
<mask token>
class ErrorNodeSignatureDifferent(AqlException):
<mask token>
class ErrorNodeDependencyUnknown(AqlException):
def __init__(self, node, dep_node):
msg = "Unable to add dependency to node '%s' from node '%s'" % (node,
dep_node)
super(ErrorNodeDependencyUnknown, self).__init__(msg)
class InternalErrorRemoveNonTailNode(AqlException):
def __init__(self, node):
msg = 'Removing non-tail node: %s' % (node,)
super(InternalErrorRemoveNonTailNode, self).__init__(msg)
class InternalErrorRemoveUnknownTailNode(AqlException):
def __init__(self, node):
        msg = 'Remove unknown tail node: %s' % (node,)
super(InternalErrorRemoveUnknownTailNode, self).__init__(msg)
class BuildStat(object):
__slots__ = 'total', 'completed', 'failed'
def __init__(self, total):
self.total = total
self.completed = 0
self.failed = 0
def addTotal(self, count):
self.total += count
def incCompleted(self):
self.completed += 1
def incFailed(self):
self.failed += 1
def getProgressStr(self):
progress = '%s/%s' % (self.completed + self.failed, self.total)
return progress
class _NodesTree(object):
__slots__ = 'node2deps', 'dep2nodes', 'tail_nodes'
def __init__(self):
self.node2deps = {}
self.dep2nodes = {}
self.tail_nodes = set()
def __len__(self):
return len(self.node2deps)
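    # returns True if making 'node' depend on any of 'new_deps' would close a
    # dependency cycle; walks the transitive deps of new_deps looking for 'node'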
def __hasCycle(self, node, new_deps):
if node in new_deps:
return True
deps = set(new_deps)
node2deps = self.node2deps
while deps:
dep = deps.pop()
dep_deps = node2deps[dep]
if node in dep_deps:
return True
deps |= dep_deps
return False
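    # registers dependency edges node -> deps; already-built deps are ignored,
    # and a node that gains unbuilt deps is no longer a tail node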
def __depends(self, node, deps):
node2deps = self.node2deps
dep2nodes = self.dep2nodes
try:
current_node_deps = node2deps[node]
deps = {dep for dep in deps if not dep.isBuilt()}
new_deps = deps - current_node_deps
if not new_deps:
return
if self.__hasCycle(node, new_deps):
raise ErrorNodeDependencyCyclic(node, new_deps)
self.tail_nodes.discard(node)
current_node_deps.update(new_deps)
for dep in new_deps:
dep2nodes[dep].add(node)
except KeyError as dep_node:
raise ErrorNodeDependencyUnknown(node, dep_node.args[0])
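    # recursively registers nodes along with their source and dependency nodes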
def __add(self, nodes):
for node in nodes:
if node not in self.node2deps:
self.node2deps[node] = set()
self.dep2nodes[node] = set()
self.tail_nodes.add(node)
node_srcnodes = node.getSourceNodes()
node_depnodes = node.getDepNodes()
self.__add(node_srcnodes)
self.__add(node_depnodes)
self.__depends(node, node_srcnodes)
self.__depends(node, node_depnodes)
def add(self, nodes):
self.__add(toSequence(nodes))
def depends(self, node, deps):
deps = toSequence(deps)
self.__add(deps)
self.__depends(node, deps)
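    # removes a finished tail node; any dependency left without dependents
    # becomes a new tail node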
def removeTail(self, node):
node2deps = self.node2deps
try:
deps = node2deps.pop(node)
if deps:
raise InternalErrorRemoveNonTailNode(node)
except KeyError as node:
raise InternalErrorRemoveUnknownTailNode(node.args[0])
tail_nodes = self.tail_nodes
for dep in self.dep2nodes.pop(node):
d = node2deps[dep]
d.remove(node)
if not d:
tail_nodes.add(dep)
def popTails(self):
tails = self.tail_nodes
self.tail_nodes = set()
return tails
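    # transitive closure: the given nodes plus everything they depend on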
def __getAllNodes(self, nodes):
nodes = set(toSequence(nodes))
all_nodes = set(nodes)
node2deps = self.node2deps
while nodes:
node = nodes.pop()
try:
deps = node2deps[node] - all_nodes
except KeyError as node:
raise ErrorNodeUnknown(node.args[0])
all_nodes.update(deps)
nodes.update(deps)
return all_nodes
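    # prunes the tree down to the requested nodes and their dependencies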
def shrinkTo(self, nodes):
node2deps = self.node2deps
dep2nodes = self.dep2nodes
ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)
self.tail_nodes -= ignore_nodes
for node in ignore_nodes:
del node2deps[node]
del dep2nodes[node]
for dep_nodes in dep2nodes.values():
dep_nodes.difference_update(ignore_nodes)
def selfTest(self):
if set(self.node2deps) != set(self.dep2nodes):
raise AssertionError('Not all deps are added')
all_dep_nodes = set()
for node in self.dep2nodes:
if node not in self.node2deps:
raise AssertionError('Missed node: %s' % (node,))
node_deps = self.node2deps[node]
if not node_deps:
if node not in self.tail_nodes:
raise AssertionError(
'Missed tail node: %s, tail_nodes: %s' % (node,
self.tail_nodes))
elif node in self.tail_nodes:
raise AssertionError('Invalid tail node: %s' % (node,))
all_dep_nodes |= node_deps
for dep in node_deps:
if node not in self.dep2nodes[dep]:
raise AssertionError(
'node not in self.dep2nodes[dep]: dep: %s, node: %s' %
(dep, node))
if all_dep_nodes - set(self.dep2nodes):
raise AssertionError('Not all deps are added')
class _VFiles(object):
__slots__ = 'names', 'handles'
def __init__(self):
self.handles = {}
self.names = {}
def __iter__(self):
raise TypeError()
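    # lazily opens one values file per builder, keyed by its build directory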
def __getitem__(self, builder):
builder_name = builder.name
try:
vfilename = self.names[builder_name]
except KeyError:
vfilename = os.path.join(builder.getBuildDir(), '.aql.db')
self.names[builder_name] = vfilename
try:
return self.handles[vfilename]
except KeyError:
vfile = ValuesFile(vfilename)
self.handles[vfilename] = vfile
return vfile
def close(self):
for vfile in self.handles.values():
vfile.close()
self.handles.clear()
self.names.clear()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, backtrace):
self.close()
<mask token>
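# per-node bookkeeping for the pre-build pipeline: which checks (depends,
# replace, split, actuality) are still pending, plus the node's split parts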
class _NodeState(object):
__slots__ = ('initialized', 'check_depends', 'check_replace',
'check_split', 'check_actual', 'split_nodes')
def __init__(self):
self.initialized = False
self.check_depends = True
self.check_replace = True
self.check_split = True
self.check_actual = True
self.split_nodes = None
def __str__(self):
return (
'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'
% (self.initialized, self.check_depends, self.check_replace,
self.check_split, self.check_actual, self.split_nodes))
class _NodesBuilder(object):
__slots__ = ('vfiles', 'build_manager', 'task_manager', 'node_states',
'building_nodes')
def __init__(self, build_manager, jobs=0, keep_going=False,
with_backtrace=True):
self.vfiles = _VFiles()
self.node_states = {}
self.building_nodes = {}
self.build_manager = build_manager
self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not
keep_going, with_backtrace=with_backtrace)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, backtrace):
self.close()
def _getNodeState(self, node):
try:
state = self.node_states[node]
except KeyError:
state = _NodeState()
self.node_states[node] = state
return state
def _removeNodeState(self, node):
try:
del self.node_states[node]
except KeyError:
pass
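    # registers a node as building; two in-flight nodes that produce the same
    # target name must have identical signatures, and the later node is
    # serialized behind the earlier one via a dependency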
def _addBuildingNode(self, node, state):
conflicting_nodes = []
building_nodes = self.building_nodes
for name, signature in node.getNamesAndSignatures():
node_signature = node, signature
other_node, other_signature = building_nodes.setdefault(name,
node_signature)
if other_node is not node:
if other_signature != signature:
raise ErrorNodeSignatureDifferent(node)
conflicting_nodes.append(other_node)
if conflicting_nodes:
state.check_actual = True
self.build_manager.depends(node, conflicting_nodes)
return True
return False
def _removeBuildingNode(self, node):
building_nodes = self.building_nodes
for name in node.getNames():
del building_nodes[name]
def isBuilding(self):
return bool(self.building_nodes)
def _checkPrebuildDepends(self, node):
dep_nodes = node.buildDepends()
if dep_nodes:
self.build_manager.depends(node, dep_nodes)
return True
return False
def _checkPrebuildReplace(self, node):
if node.buildReplace():
new_node_sources = node.getSourceNodes()
if new_node_sources:
self.build_manager.depends(node, new_node_sources)
return True
return False
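    # a batch node may split into per-source sub-nodes; once they are built,
    # the parent node simply collects their target values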
def _checkPrebuildSplit(self, node, state):
build_manager = self.build_manager
if state.check_split:
state.check_split = False
check_actual = True
if node.isBatch() and state.check_actual:
vfile = self.vfiles[node.builder]
actual = build_manager.isActualNode(node, vfile)
if actual:
self._removeNodeState(node)
build_manager.actualNode(node)
return True
check_actual = False
split_nodes = node.buildSplit()
if split_nodes:
state.split_nodes = split_nodes
for split_node in split_nodes:
split_state = self._getNodeState(split_node)
split_state.check_split = False
split_state.check_depends = False
split_state.check_replace = False
split_state.check_actual = check_actual
split_state.initialized = (split_node.builder is node.
builder)
self.build_manager.depends(node, split_nodes)
return True
elif state.split_nodes is not None:
if node.isBatch():
node._populateTargets()
else:
targets = []
for split_node in state.split_nodes:
targets += split_node.getTargetValues()
node.target_values = targets
self._removeNodeState(node)
self.build_manager.completedSplitNode(node)
return True
return False
def _prebuild(self, node, state):
if not state.initialized:
node.initiate()
state.initialized = True
if state.check_depends:
state.check_depends = False
if self._checkPrebuildDepends(node):
return True
if state.check_replace:
state.check_replace = False
if self._checkPrebuildReplace(node):
return True
if self._checkPrebuildSplit(node, state):
return True
return False
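    # schedules the given tail nodes: runs the pre-build checks, skips nodes
    # that are already up to date and submits the rest to the task manager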
def build(self, nodes):
build_manager = self.build_manager
vfiles = self.vfiles
addTask = self.task_manager.addTask
tasks_check_period = 10
added_tasks = 0
changed = False
for node in nodes:
node_state = self._getNodeState(node)
if self._prebuild(node, node_state):
changed = True
continue
if self._addBuildingNode(node, node_state):
continue
if node_state.check_actual:
vfile = vfiles[node.builder]
actual = build_manager.isActualNode(node, vfile)
if actual:
self._removeNodeState(node)
self._removeBuildingNode(node)
build_manager.actualNode(node)
changed = True
continue
addTask(node, _buildNode, node)
added_tasks += 1
if added_tasks == tasks_check_period:
changed = self._getFinishedNodes(block=False) or changed
added_tasks = 0
self._getFinishedNodes(block=not changed)
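    # harvests finished build tasks and persists node results to the values file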
def _getFinishedNodes(self, block=True):
finished_tasks = self.task_manager.finishedTasks(block=block)
vfiles = self.vfiles
build_manager = self.build_manager
for task in finished_tasks:
node = task.task_id
error = task.error
self._removeNodeState(node)
self._removeBuildingNode(node)
vfile = vfiles[node.builder]
if error is None:
node.save(vfile)
build_manager.completedNode(node, task.result)
else:
if node.isBatch():
node.save(vfile)
build_manager.failedNode(node, error)
return bool(finished_tasks)
def clear(self, nodes):
vfiles = self.vfiles
build_manager = self.build_manager
for node in nodes:
node_state = self._getNodeState(node)
node_state.check_actual = False
if self._prebuild(node, node_state):
continue
vfile = vfiles[node.builder]
node.clear(vfile)
build_manager.removedNode(node)
def status(self, nodes):
vfiles = self.vfiles
build_manager = self.build_manager
for node in nodes:
node_state = self._getNodeState(node)
node_state.check_actual = False
if self._prebuild(node, node_state):
continue
vfile = vfiles[node.builder]
if build_manager.isActualNode(node, vfile):
build_manager.actualNodeStatus(node)
else:
build_manager.outdatedNodeStatus(node)
def close(self):
try:
self.task_manager.stop()
self._getFinishedNodes(block=False)
finally:
self.vfiles.close()
class BuildManager(object):
__slots__ = ('_nodes', '_built_targets', '_failed_nodes',
'_built_node_names', 'completed', 'actual', 'explain')
def __init__(self):
self._nodes = _NodesTree()
self.__reset()
def __reset(self, build_always=False, explain=False):
self._built_targets = {}
self._failed_nodes = {}
self._built_node_names = set() if build_always else None
self.completed = 0
self.actual = 0
self.explain = explain
def add(self, nodes):
self._nodes.add(nodes)
def depends(self, node, deps):
self._nodes.depends(node, deps)
def __len__(self):
return len(self._nodes)
def selfTest(self):
self._nodes.selfTest()
def getTailNodes(self):
return self._nodes.popTails()
def actualNodeStatus(self, node):
eventNodeActual(node, self.getProgressStr())
self.actualNode(node)
def outdatedNodeStatus(self, node):
self._failed_nodes[node] = None
eventNodeOutdated(node, self.getProgressStr())
node.shrink()
def isActualNode(self, node, vfile):
return node.checkActual(vfile, self._built_node_names, self.explain)
def _addToBuiltNodeNames(self, node):
built_names = self._built_node_names
if built_names is not None:
built_names.update(node.getNames())
def completedSplitNode(self, node):
self._nodes.removeTail(node)
node.shrink()
def actualNode(self, node):
self._nodes.removeTail(node)
self.actual += 1
node.shrink()
def completedNode(self, node, builder_output):
self._checkAlreadyBuilt(node)
self._nodes.removeTail(node)
self._addToBuiltNodeNames(node)
self.completed += 1
eventNodeBuildingFinished(node, builder_output, self.getProgressStr())
node.shrink()
def failedNode(self, node, error):
self._failed_nodes[node] = error
eventNodeBuildingFailed(node, error)
def removedNode(self, node):
self._nodes.removeTail(node)
self.completed += 1
eventNodeRemoved(node, self.getProgressStr())
node.shrink()
def getProgressStr(self):
done = self.completed + self.actual
total = len(self._nodes) + done
processed = done + len(self._failed_nodes)
progress = '%s/%s' % (processed, total)
return progress
def close(self):
self._nodes = _NodesTree()
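    # reports when two different nodes build the same target value with
    # different signatures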
def _checkAlreadyBuilt(self, node):
values = node.getTargetValues()
built_targets = self._built_targets
for value in values:
value_sign = value.signature
other_value_sign = built_targets.setdefault(value.valueId(),
value_sign)
if other_value_sign != value_sign:
eventBuildTargetTwice(value, node)
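    # top-level build loop: keeps popping and building tail nodes until the
    # tree is drained and no tasks remain in flight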
def build(self, jobs, keep_going, nodes=None, build_always=False,
explain=False, with_backtrace=True):
self.__reset(build_always=build_always, explain=explain)
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self, jobs, keep_going, with_backtrace
) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails and not nodes_builder.isBuilding():
break
nodes_builder.build(tails)
return self.isOk()
def isOk(self):
return not bool(self._failed_nodes)
def failsCount(self):
return len(self._failed_nodes)
def printFails(self):
for node, error in self._failed_nodes.items():
eventFailedNode(node, error)
def printBuildState(self):
logInfo('Failed nodes: %s' % len(self._failed_nodes))
logInfo('Completed nodes: %s' % self.completed)
logInfo('Actual nodes: %s' % self.actual)
def printStatusState(self):
logInfo('Outdated nodes: %s' % len(self._failed_nodes))
logInfo('Actual nodes: %s' % self.actual)
def clear(self, nodes=None):
self.__reset()
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails:
break
nodes_builder.clear(tails)
def status(self, nodes=None, explain=False):
self.__reset(explain=explain)
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails:
break
nodes_builder.status(tails)
return self.isOk()
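# a minimal usage sketch (hypothetical names; assumes Node objects from the
# surrounding build tool have already been created):
#   bm = BuildManager()
#   bm.add(nodes)
#   ok = bm.build(jobs=4, keep_going=False)
#   if not ok:
#       bm.printFails()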
<|reserved_special_token_1|>
<mask token>
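# build-event handlers: report per-node status and overall progress to the log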
@eventStatus
def eventNodeOutdated(settings, node, progress):
msg = '(%s) OUTDATED: %s' % (progress, node.getBuildStr(settings.brief))
logInfo(msg)
<mask token>
@eventStatus
def eventNodeRemoved(settings, node, progress):
msg = node.getBuildStr(settings.brief)
if msg:
logInfo('(%s) Removed: %s' % (progress, msg))
class ErrorNodeDependencyCyclic(AqlException):
def __init__(self, node, deps):
msg = "Node '%s' (%s) has a cyclic dependency: %s" % (node, node.
getBuildStr(True), deps)
super(ErrorNodeDependencyCyclic, self).__init__(msg)
class ErrorNodeUnknown(AqlException):
def __init__(self, node):
msg = "Unknown node '%s'" % (node,)
super(ErrorNodeUnknown, self).__init__(msg)
class ErrorNodeSignatureDifferent(AqlException):
def __init__(self, node):
msg = (
'Two similar nodes have different signatures (sources, builder parameters or dependencies): %s'
% (node.getBuildStr(brief=False),))
super(ErrorNodeSignatureDifferent, self).__init__(msg)
class ErrorNodeDependencyUnknown(AqlException):
def __init__(self, node, dep_node):
msg = "Unable to add dependency to node '%s' from node '%s'" % (node,
dep_node)
super(ErrorNodeDependencyUnknown, self).__init__(msg)
class InternalErrorRemoveNonTailNode(AqlException):
def __init__(self, node):
msg = 'Removing non-tail node: %s' % (node,)
super(InternalErrorRemoveNonTailNode, self).__init__(msg)
class InternalErrorRemoveUnknownTailNode(AqlException):
def __init__(self, node):
        msg = 'Remove unknown tail node: %s' % (node,)
super(InternalErrorRemoveUnknownTailNode, self).__init__(msg)
class BuildStat(object):
__slots__ = 'total', 'completed', 'failed'
def __init__(self, total):
self.total = total
self.completed = 0
self.failed = 0
def addTotal(self, count):
self.total += count
def incCompleted(self):
self.completed += 1
def incFailed(self):
self.failed += 1
def getProgressStr(self):
progress = '%s/%s' % (self.completed + self.failed, self.total)
return progress
class _NodesTree(object):
__slots__ = 'node2deps', 'dep2nodes', 'tail_nodes'
def __init__(self):
self.node2deps = {}
self.dep2nodes = {}
self.tail_nodes = set()
def __len__(self):
return len(self.node2deps)
def __hasCycle(self, node, new_deps):
if node in new_deps:
return True
deps = set(new_deps)
node2deps = self.node2deps
while deps:
dep = deps.pop()
dep_deps = node2deps[dep]
if node in dep_deps:
return True
deps |= dep_deps
return False
def __depends(self, node, deps):
node2deps = self.node2deps
dep2nodes = self.dep2nodes
try:
current_node_deps = node2deps[node]
deps = {dep for dep in deps if not dep.isBuilt()}
new_deps = deps - current_node_deps
if not new_deps:
return
if self.__hasCycle(node, new_deps):
raise ErrorNodeDependencyCyclic(node, new_deps)
self.tail_nodes.discard(node)
current_node_deps.update(new_deps)
for dep in new_deps:
dep2nodes[dep].add(node)
except KeyError as dep_node:
raise ErrorNodeDependencyUnknown(node, dep_node.args[0])
def __add(self, nodes):
for node in nodes:
if node not in self.node2deps:
self.node2deps[node] = set()
self.dep2nodes[node] = set()
self.tail_nodes.add(node)
node_srcnodes = node.getSourceNodes()
node_depnodes = node.getDepNodes()
self.__add(node_srcnodes)
self.__add(node_depnodes)
self.__depends(node, node_srcnodes)
self.__depends(node, node_depnodes)
def add(self, nodes):
self.__add(toSequence(nodes))
def depends(self, node, deps):
deps = toSequence(deps)
self.__add(deps)
self.__depends(node, deps)
def removeTail(self, node):
node2deps = self.node2deps
try:
deps = node2deps.pop(node)
if deps:
raise InternalErrorRemoveNonTailNode(node)
except KeyError as node:
raise InternalErrorRemoveUnknownTailNode(node.args[0])
tail_nodes = self.tail_nodes
for dep in self.dep2nodes.pop(node):
d = node2deps[dep]
d.remove(node)
if not d:
tail_nodes.add(dep)
def popTails(self):
tails = self.tail_nodes
self.tail_nodes = set()
return tails
def __getAllNodes(self, nodes):
nodes = set(toSequence(nodes))
all_nodes = set(nodes)
node2deps = self.node2deps
while nodes:
node = nodes.pop()
try:
deps = node2deps[node] - all_nodes
except KeyError as node:
raise ErrorNodeUnknown(node.args[0])
all_nodes.update(deps)
nodes.update(deps)
return all_nodes
def shrinkTo(self, nodes):
node2deps = self.node2deps
dep2nodes = self.dep2nodes
ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)
self.tail_nodes -= ignore_nodes
for node in ignore_nodes:
del node2deps[node]
del dep2nodes[node]
for dep_nodes in dep2nodes.values():
dep_nodes.difference_update(ignore_nodes)
def selfTest(self):
if set(self.node2deps) != set(self.dep2nodes):
raise AssertionError('Not all deps are added')
all_dep_nodes = set()
for node in self.dep2nodes:
if node not in self.node2deps:
raise AssertionError('Missed node: %s' % (node,))
node_deps = self.node2deps[node]
if not node_deps:
if node not in self.tail_nodes:
raise AssertionError(
'Missed tail node: %s, tail_nodes: %s' % (node,
self.tail_nodes))
elif node in self.tail_nodes:
raise AssertionError('Invalid tail node: %s' % (node,))
all_dep_nodes |= node_deps
for dep in node_deps:
if node not in self.dep2nodes[dep]:
raise AssertionError(
'node not in self.dep2nodes[dep]: dep: %s, node: %s' %
(dep, node))
if all_dep_nodes - set(self.dep2nodes):
raise AssertionError('Not all deps are added')
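# Editor's note: an illustrative sketch of _NodesTree using a hypothetical
# stub node type; real Node objects come from aql.nodes and carry builders,
# sources and signatures. Added for clarity, not part of the original module.
class _StubNode(object):
    def __init__(self, name, sources=()):
        self.name = name
        self.sources = list(sources)
    def getSourceNodes(self):
        return self.sources
    def getDepNodes(self):
        return []
    def isBuilt(self):
        return False
    def __repr__(self):
        return self.name
def _demoNodesTree():
    src = _StubNode('a.c')
    obj = _StubNode('a.o', sources=[src])
    tree = _NodesTree()
    tree.add([obj])  # sources and deps are registered recursively
    tree.selfTest()  # consistency check of node2deps/dep2nodes/tail_nodes
    return tree.popTails()  # -> {src}: only leaf nodes are ready to build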
class _VFiles(object):
__slots__ = 'names', 'handles'
def __init__(self):
self.handles = {}
self.names = {}
def __iter__(self):
raise TypeError()
def __getitem__(self, builder):
builder_name = builder.name
try:
vfilename = self.names[builder_name]
except KeyError:
vfilename = os.path.join(builder.getBuildDir(), '.aql.db')
self.names[builder_name] = vfilename
try:
return self.handles[vfilename]
except KeyError:
vfile = ValuesFile(vfilename)
self.handles[vfilename] = vfile
return vfile
def close(self):
for vfile in self.handles.values():
vfile.close()
self.handles.clear()
self.names.clear()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, backtrace):
self.close()
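# Editor's note: an illustrative sketch of how _VFiles caches one ValuesFile
# per build directory; '_FakeBuilder' is hypothetical, real builders come
# from aql.nodes. Added for clarity, not part of the original module.
def _demoVFiles(build_dir):
    class _FakeBuilder(object):
        name = 'demo'
        def getBuildDir(self):
            return build_dir
    builder = _FakeBuilder()
    with _VFiles() as vfiles:
        vfile = vfiles[builder]  # opens <build_dir>/.aql.db on first access
        assert vfiles[builder] is vfile  # cached by builder name and path
    # the file handle is closed when the 'with' block exits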
def _buildNode(node):
    eventNodeBuilding(node)
    out = node.build()
    if out:
        try:
            out = out.strip()
        except Exception:
            pass
    return out
class _NodeState(object):
__slots__ = ('initialized', 'check_depends', 'check_replace',
'check_split', 'check_actual', 'split_nodes')
def __init__(self):
self.initialized = False
self.check_depends = True
self.check_replace = True
self.check_split = True
self.check_actual = True
self.split_nodes = None
def __str__(self):
return (
'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'
% (self.initialized, self.check_depends, self.check_replace,
self.check_split, self.check_actual, self.split_nodes))
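# Editor's note: a tiny illustration of the _NodeState lifecycle flags used
# by _NodesBuilder below; added for clarity, not part of the original module.
def _demoNodeState():
    state = _NodeState()
    state.initialized = True  # set once node.initiate() has run
    state.check_depends = False  # cleared after buildDepends() was examined
    return str(state)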
class _NodesBuilder(object):
__slots__ = ('vfiles', 'build_manager', 'task_manager', 'node_states',
'building_nodes')
def __init__(self, build_manager, jobs=0, keep_going=False,
with_backtrace=True):
self.vfiles = _VFiles()
self.node_states = {}
self.building_nodes = {}
self.build_manager = build_manager
self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not
keep_going, with_backtrace=with_backtrace)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, backtrace):
self.close()
def _getNodeState(self, node):
try:
state = self.node_states[node]
except KeyError:
state = _NodeState()
self.node_states[node] = state
return state
def _removeNodeState(self, node):
try:
del self.node_states[node]
except KeyError:
pass
def _addBuildingNode(self, node, state):
conflicting_nodes = []
building_nodes = self.building_nodes
for name, signature in node.getNamesAndSignatures():
node_signature = node, signature
other_node, other_signature = building_nodes.setdefault(name,
node_signature)
if other_node is not node:
if other_signature != signature:
raise ErrorNodeSignatureDifferent(node)
conflicting_nodes.append(other_node)
if conflicting_nodes:
state.check_actual = True
self.build_manager.depends(node, conflicting_nodes)
return True
return False
def _removeBuildingNode(self, node):
building_nodes = self.building_nodes
for name in node.getNames():
del building_nodes[name]
def isBuilding(self):
return bool(self.building_nodes)
def _checkPrebuildDepends(self, node):
dep_nodes = node.buildDepends()
if dep_nodes:
self.build_manager.depends(node, dep_nodes)
return True
return False
def _checkPrebuildReplace(self, node):
if node.buildReplace():
new_node_sources = node.getSourceNodes()
if new_node_sources:
self.build_manager.depends(node, new_node_sources)
return True
return False
def _checkPrebuildSplit(self, node, state):
build_manager = self.build_manager
if state.check_split:
state.check_split = False
check_actual = True
if node.isBatch() and state.check_actual:
vfile = self.vfiles[node.builder]
actual = build_manager.isActualNode(node, vfile)
if actual:
self._removeNodeState(node)
build_manager.actualNode(node)
return True
check_actual = False
split_nodes = node.buildSplit()
if split_nodes:
state.split_nodes = split_nodes
for split_node in split_nodes:
split_state = self._getNodeState(split_node)
split_state.check_split = False
split_state.check_depends = False
split_state.check_replace = False
split_state.check_actual = check_actual
                    split_state.initialized = split_node.builder is node.builder
self.build_manager.depends(node, split_nodes)
return True
elif state.split_nodes is not None:
if node.isBatch():
node._populateTargets()
else:
targets = []
for split_node in state.split_nodes:
targets += split_node.getTargetValues()
node.target_values = targets
self._removeNodeState(node)
self.build_manager.completedSplitNode(node)
return True
return False
def _prebuild(self, node, state):
if not state.initialized:
node.initiate()
state.initialized = True
if state.check_depends:
state.check_depends = False
if self._checkPrebuildDepends(node):
return True
if state.check_replace:
state.check_replace = False
if self._checkPrebuildReplace(node):
return True
if self._checkPrebuildSplit(node, state):
return True
return False
def build(self, nodes):
build_manager = self.build_manager
vfiles = self.vfiles
addTask = self.task_manager.addTask
tasks_check_period = 10
added_tasks = 0
changed = False
for node in nodes:
node_state = self._getNodeState(node)
if self._prebuild(node, node_state):
changed = True
continue
if self._addBuildingNode(node, node_state):
continue
if node_state.check_actual:
vfile = vfiles[node.builder]
actual = build_manager.isActualNode(node, vfile)
if actual:
self._removeNodeState(node)
self._removeBuildingNode(node)
build_manager.actualNode(node)
changed = True
continue
addTask(node, _buildNode, node)
added_tasks += 1
if added_tasks == tasks_check_period:
changed = self._getFinishedNodes(block=False) or changed
added_tasks = 0
self._getFinishedNodes(block=not changed)
def _getFinishedNodes(self, block=True):
finished_tasks = self.task_manager.finishedTasks(block=block)
vfiles = self.vfiles
build_manager = self.build_manager
for task in finished_tasks:
node = task.task_id
error = task.error
self._removeNodeState(node)
self._removeBuildingNode(node)
vfile = vfiles[node.builder]
if error is None:
node.save(vfile)
build_manager.completedNode(node, task.result)
else:
if node.isBatch():
node.save(vfile)
build_manager.failedNode(node, error)
return bool(finished_tasks)
def clear(self, nodes):
vfiles = self.vfiles
build_manager = self.build_manager
for node in nodes:
node_state = self._getNodeState(node)
node_state.check_actual = False
if self._prebuild(node, node_state):
continue
vfile = vfiles[node.builder]
node.clear(vfile)
build_manager.removedNode(node)
def status(self, nodes):
vfiles = self.vfiles
build_manager = self.build_manager
for node in nodes:
node_state = self._getNodeState(node)
node_state.check_actual = False
if self._prebuild(node, node_state):
continue
vfile = vfiles[node.builder]
if build_manager.isActualNode(node, vfile):
build_manager.actualNodeStatus(node)
else:
build_manager.outdatedNodeStatus(node)
def close(self):
try:
self.task_manager.stop()
self._getFinishedNodes(block=False)
finally:
self.vfiles.close()
class BuildManager(object):
__slots__ = ('_nodes', '_built_targets', '_failed_nodes',
'_built_node_names', 'completed', 'actual', 'explain')
def __init__(self):
self._nodes = _NodesTree()
self.__reset()
def __reset(self, build_always=False, explain=False):
self._built_targets = {}
self._failed_nodes = {}
self._built_node_names = set() if build_always else None
self.completed = 0
self.actual = 0
self.explain = explain
def add(self, nodes):
self._nodes.add(nodes)
def depends(self, node, deps):
self._nodes.depends(node, deps)
def __len__(self):
return len(self._nodes)
def selfTest(self):
self._nodes.selfTest()
def getTailNodes(self):
return self._nodes.popTails()
def actualNodeStatus(self, node):
eventNodeActual(node, self.getProgressStr())
self.actualNode(node)
def outdatedNodeStatus(self, node):
self._failed_nodes[node] = None
eventNodeOutdated(node, self.getProgressStr())
node.shrink()
def isActualNode(self, node, vfile):
return node.checkActual(vfile, self._built_node_names, self.explain)
def _addToBuiltNodeNames(self, node):
built_names = self._built_node_names
if built_names is not None:
built_names.update(node.getNames())
def completedSplitNode(self, node):
self._nodes.removeTail(node)
node.shrink()
def actualNode(self, node):
self._nodes.removeTail(node)
self.actual += 1
node.shrink()
def completedNode(self, node, builder_output):
self._checkAlreadyBuilt(node)
self._nodes.removeTail(node)
self._addToBuiltNodeNames(node)
self.completed += 1
eventNodeBuildingFinished(node, builder_output, self.getProgressStr())
node.shrink()
def failedNode(self, node, error):
self._failed_nodes[node] = error
eventNodeBuildingFailed(node, error)
def removedNode(self, node):
self._nodes.removeTail(node)
self.completed += 1
eventNodeRemoved(node, self.getProgressStr())
node.shrink()
def getProgressStr(self):
done = self.completed + self.actual
total = len(self._nodes) + done
processed = done + len(self._failed_nodes)
progress = '%s/%s' % (processed, total)
return progress
def close(self):
self._nodes = _NodesTree()
def _checkAlreadyBuilt(self, node):
values = node.getTargetValues()
built_targets = self._built_targets
for value in values:
value_sign = value.signature
other_value_sign = built_targets.setdefault(value.valueId(),
value_sign)
if other_value_sign != value_sign:
eventBuildTargetTwice(value, node)
def build(self, jobs, keep_going, nodes=None, build_always=False,
explain=False, with_backtrace=True):
self.__reset(build_always=build_always, explain=explain)
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self, jobs, keep_going, with_backtrace
) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails and not nodes_builder.isBuilding():
break
nodes_builder.build(tails)
return self.isOk()
def isOk(self):
return not bool(self._failed_nodes)
def failsCount(self):
return len(self._failed_nodes)
def printFails(self):
for node, error in self._failed_nodes.items():
eventFailedNode(node, error)
def printBuildState(self):
logInfo('Failed nodes: %s' % len(self._failed_nodes))
logInfo('Completed nodes: %s' % self.completed)
logInfo('Actual nodes: %s' % self.actual)
def printStatusState(self):
logInfo('Outdated nodes: %s' % len(self._failed_nodes))
logInfo('Actual nodes: %s' % self.actual)
def clear(self, nodes=None):
self.__reset()
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails:
break
nodes_builder.clear(tails)
def status(self, nodes=None, explain=False):
self.__reset(explain=explain)
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo(nodes)
with _NodesBuilder(self) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails:
break
nodes_builder.status(tails)
return self.isOk()
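# Editor's note: a high-level usage sketch of BuildManager, added for
# illustration only; 'nodes' stands for fully configured Node objects,
# which aql constructs from builders elsewhere.
def _demoBuild(nodes, jobs=4):
    bm = BuildManager()
    bm.add(nodes)  # register nodes together with their sources and deps
    bm.selfTest()  # optional consistency check of the dependency tree
    ok = bm.build(jobs=jobs, keep_going=False)
    if not ok:
        bm.printFails()
    bm.printBuildState()
    bm.close()
    return ok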
#
# Copyright (c) 2011-2014 The developers of Aqualid project - http://aqualid.googlecode.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom
# the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE
# AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__all__ = (
'BuildManager',
'ErrorNodeDependencyCyclic', 'ErrorNodeDependencyUnknown',
)
import os.path
from aql.util_types import toSequence, AqlException
from aql.utils import eventStatus, eventWarning, eventError, logInfo, logError, logWarning, TaskManager
from aql.values import ValuesFile
#//===========================================================================//
@eventStatus
def eventNodeActual( settings, node, progress ):
msg = "(%s) ACTUAL: %s" % (progress, node.getBuildStr( settings.brief ))
logInfo( msg )
#//===========================================================================//
@eventStatus
def eventNodeOutdated( settings, node, progress ):
msg = "(%s) OUTDATED: %s" % (progress, node.getBuildStr( settings.brief ))
logInfo( msg )
#//===========================================================================//
@eventWarning
def eventBuildTargetTwice( settings, value, node1 ):
logWarning("Target '%s' is built twice. The last time built by: '%s' " %
( value.name, node1.getBuildStr( settings.brief )) )
#//===========================================================================//
@eventError
def eventFailedNode( settings, node, error ):
msg = node.getBuildStr( settings.brief )
msg += '\n\n%s\n' % (error,)
logError( msg )
#//===========================================================================//
@eventStatus
def eventNodeBuilding( settings, node ):
pass
#//===========================================================================//
@eventStatus
def eventNodeBuildingFinished( settings, node, builder_output, progress ):
msg = node.getBuildStr( settings.brief )
if settings.with_output and builder_output:
msg += '\n'
if builder_output:
msg += builder_output
msg += '\n'
msg = "(%s) %s" % (progress, msg)
logInfo( msg )
#//===========================================================================//
@eventStatus
def eventNodeBuildingFailed( settings, node, error ):
pass
#//===========================================================================//
@eventStatus
def eventNodeRemoved( settings, node, progress ):
msg = node.getBuildStr( settings.brief )
if msg:
logInfo( "(%s) Removed: %s" % (progress, msg) )
#//===========================================================================//
class ErrorNodeDependencyCyclic( AqlException ):
def __init__( self, node, deps ):
msg = "Node '%s' (%s) has a cyclic dependency: %s" % (node, node.getBuildStr(True), deps )
super(ErrorNodeDependencyCyclic, self).__init__( msg )
#//===========================================================================//
class ErrorNodeUnknown(AqlException):
def __init__( self, node ):
msg = "Unknown node '%s'" % (node, )
super(ErrorNodeUnknown, self).__init__( msg )
#//===========================================================================//
class ErrorNodeSignatureDifferent(AqlException):
def __init__( self, node ):
msg = "Two similar nodes have different signatures (sources, builder parameters or dependencies): %s" % (node.getBuildStr( brief = False ), )
super(ErrorNodeSignatureDifferent, self).__init__( msg )
#//===========================================================================//
class ErrorNodeDependencyUnknown(AqlException):
def __init__( self, node, dep_node ):
msg = "Unable to add dependency to node '%s' from node '%s'" % (node, dep_node)
super(ErrorNodeDependencyUnknown, self).__init__( msg )
#//===========================================================================//
class InternalErrorRemoveNonTailNode( AqlException ):
def __init__( self, node ):
msg = "Removing non-tail node: %s" % (node,)
super(InternalErrorRemoveNonTailNode, self).__init__( msg )
#//===========================================================================//
class InternalErrorRemoveUnknownTailNode(AqlException):
def __init__( self, node ):
msg = "Remove unknown tail node: : %s" % (node,)
super(InternalErrorRemoveUnknownTailNode, self).__init__( msg )
#//===========================================================================//
class BuildStat (object):
__slots__ = \
(
'total',
'completed',
'failed',
)
def __init__(self, total):
self.total = total
self.completed = 0
self.failed = 0
def addTotal(self, count ):
self.total += count
def incCompleted(self):
self.completed += 1
def incFailed(self):
self.failed += 1
def getProgressStr(self):
progress = "%s/%s" % (self.completed + self.failed, self.total )
return progress
#//===========================================================================//
class _NodesTree (object):
__slots__ = \
(
'node2deps',
'dep2nodes',
'tail_nodes',
)
#//-------------------------------------------------------//
def __init__( self ):
self.node2deps = {}
self.dep2nodes = {}
self.tail_nodes = set()
#//-------------------------------------------------------//
def __len__(self):
return len(self.node2deps)
#//-------------------------------------------------------//
def __hasCycle( self, node, new_deps ):
if node in new_deps:
return True
deps = set(new_deps)
node2deps = self.node2deps
while deps:
dep = deps.pop()
dep_deps = node2deps[dep]
if node in dep_deps:
return True
deps |= dep_deps
return False
#//-------------------------------------------------------//
def __depends( self, node, deps ):
node2deps = self.node2deps
dep2nodes = self.dep2nodes
try:
current_node_deps = node2deps[ node ]
deps = { dep for dep in deps if not dep.isBuilt() }
new_deps = deps - current_node_deps
if not new_deps:
return
if self.__hasCycle( node, new_deps ):
raise ErrorNodeDependencyCyclic( node, new_deps )
self.tail_nodes.discard( node )
#//-------------------------------------------------------//
current_node_deps.update( new_deps )
#//-------------------------------------------------------//
for dep in new_deps:
dep2nodes[ dep ].add( node )
except KeyError as dep_node:
raise ErrorNodeDependencyUnknown( node, dep_node.args[0] )
#//-------------------------------------------------------//
def __add( self, nodes ):
for node in nodes:
if node not in self.node2deps:
self.node2deps[ node ] = set()
self.dep2nodes[ node ] = set()
self.tail_nodes.add( node )
node_srcnodes = node.getSourceNodes()
node_depnodes = node.getDepNodes()
self.__add( node_srcnodes ) # TODO: recursively add sources and depends
self.__add( node_depnodes ) # It would be better to rewrite this code to avoid the recursion
self.__depends( node, node_srcnodes )
self.__depends( node, node_depnodes )
#//-------------------------------------------------------//
def add( self, nodes ):
self.__add( toSequence( nodes ) )
#//-------------------------------------------------------//
def depends( self, node, deps ):
deps = toSequence( deps )
self.__add( deps )
self.__depends( node, deps )
#//-------------------------------------------------------//
def removeTail( self, node ):
node2deps = self.node2deps
try:
deps = node2deps.pop(node)
if deps:
raise InternalErrorRemoveNonTailNode( node )
except KeyError as node:
raise InternalErrorRemoveUnknownTailNode( node.args[0] )
tail_nodes = self.tail_nodes
# tail_nodes.remove( node )
for dep in self.dep2nodes.pop( node ):
d = node2deps[ dep ]
d.remove( node )
if not d:
tail_nodes.add( dep )
#//-------------------------------------------------------//
def popTails( self ):
tails = self.tail_nodes
self.tail_nodes = set()
return tails
#//-------------------------------------------------------//
def __getAllNodes(self, nodes ):
nodes = set(toSequence(nodes))
all_nodes = set( nodes )
node2deps = self.node2deps
while nodes:
node = nodes.pop()
try:
deps = node2deps[ node ] - all_nodes
except KeyError as node:
raise ErrorNodeUnknown( node.args[0] )
all_nodes.update( deps )
nodes.update( deps )
return all_nodes
#//-------------------------------------------------------//
def shrinkTo(self, nodes ):
node2deps = self.node2deps
dep2nodes = self.dep2nodes
ignore_nodes = set(node2deps) - self.__getAllNodes( nodes )
self.tail_nodes -= ignore_nodes
for node in ignore_nodes:
del node2deps[ node ]
del dep2nodes[ node ]
for dep_nodes in dep2nodes.values():
dep_nodes.difference_update( ignore_nodes )
#//-------------------------------------------------------//
def selfTest( self ):
if set(self.node2deps) != set(self.dep2nodes):
raise AssertionError("Not all deps are added")
all_dep_nodes = set()
for node in self.dep2nodes:
if node not in self.node2deps:
raise AssertionError("Missed node: %s" % (node,) )
node_deps = self.node2deps[node]
if not node_deps:
if node not in self.tail_nodes:
raise AssertionError("Missed tail node: %s, tail_nodes: %s" % (node, self.tail_nodes) )
else:
if node in self.tail_nodes:
raise AssertionError("Invalid tail node: %s" % (node,) )
all_dep_nodes |= node_deps
for dep in node_deps:
if node not in self.dep2nodes[dep]:
raise AssertionError("node not in self.dep2nodes[dep]: dep: %s, node: %s" % (dep, node) )
if all_dep_nodes - set(self.dep2nodes):
raise AssertionError("Not all deps are added")
#//===========================================================================//
class _VFiles( object ):
__slots__ = \
(
'names',
'handles',
)
#//-------------------------------------------------------//
def __init__( self ):
self.handles = {}
self.names = {}
#//-------------------------------------------------------//
def __iter__(self):
raise TypeError()
#//-------------------------------------------------------//
def __getitem__( self, builder ):
builder_name = builder.name
try:
vfilename = self.names[ builder_name ]
except KeyError:
vfilename = os.path.join( builder.getBuildDir(), '.aql.db' )
self.names[ builder_name ] = vfilename
try:
return self.handles[ vfilename ]
except KeyError:
vfile = ValuesFile( vfilename )
self.handles[ vfilename ] = vfile
return vfile
#//-------------------------------------------------------//
def close(self):
for vfile in self.handles.values():
vfile.close()
self.handles.clear()
self.names.clear()
#//-------------------------------------------------------//
def __enter__(self):
return self
#//-------------------------------------------------------//
def __exit__(self, exc_type, exc_value, backtrace):
self.close()
#//===========================================================================//
def _buildNode( node ):
eventNodeBuilding( node )
out = node.build()
if out:
try:
out = out.strip()
except Exception:
pass
return out
#//===========================================================================//
class _NodeState( object ):
__slots__ = \
(
'initialized',
'check_depends',
'check_replace',
'check_split',
'check_actual',
'split_nodes',
)
def __init__(self ):
self.initialized = False
self.check_depends = True
self.check_replace = True
self.check_split = True
self.check_actual = True
self.split_nodes = None
def __str__(self):
return "initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s" %\
(self.initialized, self.check_depends, self.check_replace, self.check_split, self.check_actual, self.split_nodes )
#//===========================================================================//
# noinspection PyAttributeOutsideInit
class _NodesBuilder (object):
__slots__ = \
(
'vfiles',
'build_manager',
'task_manager',
'node_states',
'building_nodes',
)
#//-------------------------------------------------------//
def __init__( self, build_manager, jobs = 0, keep_going = False, with_backtrace = True ):
self.vfiles = _VFiles()
self.node_states = {}
self.building_nodes = {}
self.build_manager = build_manager
self.task_manager = TaskManager( num_threads = jobs, stop_on_fail = not keep_going, with_backtrace = with_backtrace )
#//-------------------------------------------------------//
def __enter__(self):
return self
#//-------------------------------------------------------//
def __exit__(self, exc_type, exc_value, backtrace):
self.close()
#//-------------------------------------------------------//
def _getNodeState( self, node ):
try:
state = self.node_states[ node ]
except KeyError:
state = _NodeState()
self.node_states[ node ] = state
return state
#//-------------------------------------------------------//
def _removeNodeState( self, node ):
try:
del self.node_states[ node ]
except KeyError:
pass
#//-------------------------------------------------------//
def _addBuildingNode( self, node, state ):
conflicting_nodes = []
building_nodes = self.building_nodes
for name, signature in node.getNamesAndSignatures():
node_signature = (node, signature)
other_node, other_signature = building_nodes.setdefault( name, node_signature )
if other_node is not node:
if other_signature != signature:
raise ErrorNodeSignatureDifferent( node )
conflicting_nodes.append( other_node )
if conflicting_nodes:
state.check_actual = True
self.build_manager.depends( node, conflicting_nodes )
return True
return False
#//-------------------------------------------------------//
def _removeBuildingNode( self, node ):
building_nodes = self.building_nodes
for name in node.getNames():
del building_nodes[ name ]
#//-------------------------------------------------------//
def isBuilding(self):
return bool(self.building_nodes)
#//-------------------------------------------------------//
def _checkPrebuildDepends( self, node ):
dep_nodes = node.buildDepends()
if dep_nodes:
self.build_manager.depends( node, dep_nodes )
return True
return False
#//-------------------------------------------------------//
def _checkPrebuildReplace( self, node ):
if node.buildReplace():
new_node_sources = node.getSourceNodes()
if new_node_sources:
self.build_manager.depends( node, new_node_sources )
return True
return False
#//-------------------------------------------------------//
def _checkPrebuildSplit( self, node, state ):
build_manager = self.build_manager
if state.check_split:
state.check_split = False
check_actual = True
if node.isBatch() and state.check_actual:
# Check for changed sources of BatchNode
vfile = self.vfiles[ node.builder ]
actual = build_manager.isActualNode( node, vfile )
if actual:
self._removeNodeState( node )
build_manager.actualNode( node )
return True
check_actual = False
split_nodes = node.buildSplit()
if split_nodes:
state.split_nodes = split_nodes
for split_node in split_nodes:
split_state = self._getNodeState( split_node )
split_state.check_split = False
split_state.check_depends = False
split_state.check_replace = False
split_state.check_actual = check_actual
split_state.initialized = split_node.builder is node.builder
self.build_manager.depends( node, split_nodes )
return True
elif state.split_nodes is not None:
if node.isBatch():
node._populateTargets()
else:
targets = []
for split_node in state.split_nodes:
targets += split_node.getTargetValues()
node.target_values = targets
self._removeNodeState( node )
self.build_manager.completedSplitNode( node )
return True
return False
#//-------------------------------------------------------//
def _prebuild( self, node, state ):
# print( "node: %s, state: %s" % (node, state))
if not state.initialized:
node.initiate()
state.initialized = True
if state.check_depends:
state.check_depends = False
if self._checkPrebuildDepends( node ):
return True
if state.check_replace:
state.check_replace = False
if self._checkPrebuildReplace( node ):
return True
if self._checkPrebuildSplit( node, state ):
return True
return False
#//-------------------------------------------------------//
def build( self, nodes ):
build_manager = self.build_manager
vfiles = self.vfiles
addTask = self.task_manager.addTask
tasks_check_period = 10
added_tasks = 0
changed = False
for node in nodes:
node_state = self._getNodeState( node )
if self._prebuild( node, node_state ):
changed = True
continue
if self._addBuildingNode( node, node_state ):
continue
if node_state.check_actual:
vfile = vfiles[ node.builder ]
actual = build_manager.isActualNode( node, vfile )
if actual:
self._removeNodeState( node )
self._removeBuildingNode( node )
build_manager.actualNode( node )
changed = True
continue
addTask( node, _buildNode, node )
added_tasks += 1
if added_tasks == tasks_check_period:
changed = self._getFinishedNodes( block = False ) or changed
added_tasks = 0
self._getFinishedNodes( block = not changed )
#//-------------------------------------------------------//
def _getFinishedNodes( self, block = True ):
# print("tasks: %s, finished_tasks: %s" % (self.task_manager.unfinished_tasks, self.task_manager.finished_tasks.qsize()))
finished_tasks = self.task_manager.finishedTasks( block = block )
vfiles = self.vfiles
build_manager = self.build_manager
for task in finished_tasks:
node = task.task_id
error = task.error
self._removeNodeState( node )
self._removeBuildingNode( node )
vfile = vfiles[ node.builder ]
if error is None:
node.save( vfile )
build_manager.completedNode( node, task.result )
else:
if node.isBatch():
node.save( vfile )
build_manager.failedNode( node, error )
return bool(finished_tasks)
#//-------------------------------------------------------//
def clear( self, nodes ):
vfiles = self.vfiles
build_manager = self.build_manager
for node in nodes:
node_state = self._getNodeState( node )
node_state.check_actual = False
if self._prebuild( node, node_state ):
continue
vfile = vfiles[ node.builder ]
node.clear( vfile )
build_manager.removedNode( node )
#//-------------------------------------------------------//
def status( self, nodes ):
vfiles = self.vfiles
build_manager = self.build_manager
for node in nodes:
node_state = self._getNodeState( node )
node_state.check_actual = False
if self._prebuild( node, node_state ):
continue
vfile = vfiles[ node.builder ]
if build_manager.isActualNode( node, vfile ):
build_manager.actualNodeStatus( node )
else:
build_manager.outdatedNodeStatus( node )
#//-------------------------------------------------------//
def close( self ):
try:
self.task_manager.stop()
self._getFinishedNodes( block = False )
finally:
self.vfiles.close()
#//===========================================================================//
class BuildManager (object):
__slots__ = \
(
'_nodes',
'_built_targets',
'_failed_nodes',
'_built_node_names',
'completed',
'actual',
'explain',
)
#//-------------------------------------------------------//
def __init__(self):
self._nodes = _NodesTree()
self.__reset()
#//-------------------------------------------------------//
def __reset(self, build_always = False, explain = False ):
self._built_targets = {}
self._failed_nodes = {}
self._built_node_names = set() if build_always else None
self.completed = 0
self.actual = 0
self.explain = explain
#//-------------------------------------------------------//
def add( self, nodes ):
self._nodes.add( nodes )
#//-------------------------------------------------------//
def depends( self, node, deps ):
self._nodes.depends( node, deps )
#//-------------------------------------------------------//
def __len__(self):
return len(self._nodes)
#//-------------------------------------------------------//
def selfTest( self ):
self._nodes.selfTest()
#//-------------------------------------------------------//
def getTailNodes(self):
return self._nodes.popTails()
#//-------------------------------------------------------//
def actualNodeStatus( self, node ):
eventNodeActual( node, self.getProgressStr() )
self.actualNode( node )
#//-------------------------------------------------------//
def outdatedNodeStatus( self, node ):
self._failed_nodes[ node ] = None
eventNodeOutdated( node, self.getProgressStr() )
node.shrink()
#//-------------------------------------------------------//
def isActualNode( self, node, vfile ):
return node.checkActual( vfile, self._built_node_names, self.explain )
#//-------------------------------------------------------//
def _addToBuiltNodeNames(self, node ):
built_names = self._built_node_names
if built_names is not None:
built_names.update( node.getNames() )
#//-------------------------------------------------------//
def completedSplitNode(self, node ):
self._nodes.removeTail( node )
node.shrink()
#//-------------------------------------------------------//
def actualNode( self, node ):
self._nodes.removeTail( node )
self.actual += 1
node.shrink()
#//-------------------------------------------------------//
def completedNode( self, node, builder_output ):
self._checkAlreadyBuilt( node )
self._nodes.removeTail( node )
self._addToBuiltNodeNames( node )
self.completed += 1
eventNodeBuildingFinished( node, builder_output, self.getProgressStr() )
node.shrink()
#//-------------------------------------------------------//
def failedNode( self, node, error ):
self._failed_nodes[ node ] = error
eventNodeBuildingFailed( node, error )
#//-------------------------------------------------------//
def removedNode( self, node ):
self._nodes.removeTail( node )
self.completed += 1
eventNodeRemoved( node, self.getProgressStr() )
node.shrink()
#//-------------------------------------------------------//
def getProgressStr(self):
done = self.completed + self.actual
total = len(self._nodes) + done
processed = done + len(self._failed_nodes)
progress = "%s/%s" % (processed, total)
return progress
#//-------------------------------------------------------//
def close( self ):
self._nodes = _NodesTree()
#//-------------------------------------------------------//
def _checkAlreadyBuilt( self, node ):
values = node.getTargetValues()
built_targets = self._built_targets
for value in values:
value_sign = value.signature
other_value_sign = built_targets.setdefault( value.valueId(), value_sign )
if other_value_sign != value_sign:
eventBuildTargetTwice( value, node )
#//-------------------------------------------------------//
def build( self, jobs, keep_going, nodes = None, build_always = False, explain = False, with_backtrace = True ):
self.__reset( build_always = build_always, explain = explain )
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo( nodes )
with _NodesBuilder( self, jobs, keep_going, with_backtrace ) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails and not nodes_builder.isBuilding():
break
nodes_builder.build( tails )
return self.isOk()
#//-------------------------------------------------------//
def isOk(self):
return not bool( self._failed_nodes )
#//-------------------------------------------------------//
def failsCount(self):
return len( self._failed_nodes )
#//-------------------------------------------------------//
def printFails(self ):
for node, error in self._failed_nodes.items():
eventFailedNode( node, error )
#//-------------------------------------------------------//
def printBuildState(self):
logInfo("Failed nodes: %s" % len(self._failed_nodes) )
logInfo("Completed nodes: %s" % self.completed )
logInfo("Actual nodes: %s" % self.actual )
#//-------------------------------------------------------//
def printStatusState(self):
logInfo("Outdated nodes: %s" % len(self._failed_nodes) )
logInfo("Actual nodes: %s" % self.actual )
#//-------------------------------------------------------//
def clear( self, nodes = None ):
self.__reset()
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo( nodes )
with _NodesBuilder( self ) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails:
break
nodes_builder.clear( tails )
#//-------------------------------------------------------//
def status( self, nodes = None, explain = False ):
self.__reset( explain = explain )
nodes_tree = self._nodes
if nodes is not None:
nodes_tree.shrinkTo( nodes )
with _NodesBuilder( self ) as nodes_builder:
while True:
tails = self.getTailNodes()
if not tails:
break
nodes_builder.status( tails )
return self.isOk()
|
flexible
|
{
"blob_id": "cbfccffce2884e1cbebe21daf7792eebc1f88571",
"index": 6864,
"step-1": "<mask token>\n\n\nclass _NodesTree(object):\n <mask token>\n <mask token>\n <mask token>\n\n def __hasCycle(self, node, new_deps):\n if node in new_deps:\n return True\n deps = set(new_deps)\n node2deps = self.node2deps\n while deps:\n dep = deps.pop()\n dep_deps = node2deps[dep]\n if node in dep_deps:\n return True\n deps |= dep_deps\n return False\n\n def __depends(self, node, deps):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n try:\n current_node_deps = node2deps[node]\n deps = {dep for dep in deps if not dep.isBuilt()}\n new_deps = deps - current_node_deps\n if not new_deps:\n return\n if self.__hasCycle(node, new_deps):\n raise ErrorNodeDependencyCyclic(node, new_deps)\n self.tail_nodes.discard(node)\n current_node_deps.update(new_deps)\n for dep in new_deps:\n dep2nodes[dep].add(node)\n except KeyError as dep_node:\n raise ErrorNodeDependencyUnknown(node, dep_node.args[0])\n\n def __add(self, nodes):\n for node in nodes:\n if node not in self.node2deps:\n self.node2deps[node] = set()\n self.dep2nodes[node] = set()\n self.tail_nodes.add(node)\n node_srcnodes = node.getSourceNodes()\n node_depnodes = node.getDepNodes()\n self.__add(node_srcnodes)\n self.__add(node_depnodes)\n self.__depends(node, node_srcnodes)\n self.__depends(node, node_depnodes)\n <mask token>\n\n def depends(self, node, deps):\n deps = toSequence(deps)\n self.__add(deps)\n self.__depends(node, deps)\n\n def removeTail(self, node):\n node2deps = self.node2deps\n try:\n deps = node2deps.pop(node)\n if deps:\n raise InternalErrorRemoveNonTailNode(node)\n except KeyError as node:\n raise InternalErrorRemoveUnknownTailNode(node.args[0])\n tail_nodes = self.tail_nodes\n for dep in self.dep2nodes.pop(node):\n d = node2deps[dep]\n d.remove(node)\n if not d:\n tail_nodes.add(dep)\n <mask token>\n <mask token>\n\n def shrinkTo(self, nodes):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)\n self.tail_nodes -= ignore_nodes\n for node in ignore_nodes:\n del node2deps[node]\n del dep2nodes[node]\n for dep_nodes in dep2nodes.values():\n dep_nodes.difference_update(ignore_nodes)\n\n def selfTest(self):\n if set(self.node2deps) != set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n all_dep_nodes = set()\n for node in self.dep2nodes:\n if node not in self.node2deps:\n raise AssertionError('Missed node: %s' % (node,))\n node_deps = self.node2deps[node]\n if not node_deps:\n if node not in self.tail_nodes:\n raise AssertionError(\n 'Missed tail node: %s, tail_nodes: %s' % (node,\n self.tail_nodes))\n elif node in self.tail_nodes:\n raise AssertionError('Invalid tail node: %s' % (node,))\n all_dep_nodes |= node_deps\n for dep in node_deps:\n if node not in self.dep2nodes[dep]:\n raise AssertionError(\n 'node not in self.dep2nodes[dep]: dep: %s, node: %s' %\n (dep, node))\n if all_dep_nodes - set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n\n\nclass _VFiles(object):\n __slots__ = 'names', 'handles'\n\n def __init__(self):\n self.handles = {}\n self.names = {}\n\n def __iter__(self):\n raise TypeError()\n\n def __getitem__(self, builder):\n builder_name = builder.name\n try:\n vfilename = self.names[builder_name]\n except KeyError:\n vfilename = os.path.join(builder.getBuildDir(), '.aql.db')\n self.names[builder_name] = vfilename\n try:\n return self.handles[vfilename]\n except KeyError:\n vfile = ValuesFile(vfilename)\n self.handles[vfilename] = vfile\n return vfile\n\n def close(self):\n for vfile 
in self.handles.values():\n vfile.close()\n self.handles.clear()\n self.names.clear()\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n\n<mask token>\n\n\nclass _NodeState(object):\n __slots__ = ('initialized', 'check_depends', 'check_replace',\n 'check_split', 'check_actual', 'split_nodes')\n\n def __init__(self):\n self.initialized = False\n self.check_depends = True\n self.check_replace = True\n self.check_split = True\n self.check_actual = True\n self.split_nodes = None\n\n def __str__(self):\n return (\n 'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'\n % (self.initialized, self.check_depends, self.check_replace,\n self.check_split, self.check_actual, self.split_nodes))\n\n\nclass _NodesBuilder(object):\n __slots__ = ('vfiles', 'build_manager', 'task_manager', 'node_states',\n 'building_nodes')\n\n def __init__(self, build_manager, jobs=0, keep_going=False,\n with_backtrace=True):\n self.vfiles = _VFiles()\n self.node_states = {}\n self.building_nodes = {}\n self.build_manager = build_manager\n self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not\n keep_going, with_backtrace=with_backtrace)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n def _getNodeState(self, node):\n try:\n state = self.node_states[node]\n except KeyError:\n state = _NodeState()\n self.node_states[node] = state\n return state\n\n def _removeNodeState(self, node):\n try:\n del self.node_states[node]\n except KeyError:\n pass\n\n def _addBuildingNode(self, node, state):\n conflicting_nodes = []\n building_nodes = self.building_nodes\n for name, signature in node.getNamesAndSignatures():\n node_signature = node, signature\n other_node, other_signature = building_nodes.setdefault(name,\n node_signature)\n if other_node is not node:\n if other_signature != signature:\n raise ErrorNodeSignatureDifferent(node)\n conflicting_nodes.append(other_node)\n if conflicting_nodes:\n state.check_actual = True\n self.build_manager.depends(node, conflicting_nodes)\n return True\n return False\n\n def _removeBuildingNode(self, node):\n building_nodes = self.building_nodes\n for name in node.getNames():\n del building_nodes[name]\n\n def isBuilding(self):\n return bool(self.building_nodes)\n\n def _checkPrebuildDepends(self, node):\n dep_nodes = node.buildDepends()\n if dep_nodes:\n self.build_manager.depends(node, dep_nodes)\n return True\n return False\n\n def _checkPrebuildReplace(self, node):\n if node.buildReplace():\n new_node_sources = node.getSourceNodes()\n if new_node_sources:\n self.build_manager.depends(node, new_node_sources)\n return True\n return False\n\n def _checkPrebuildSplit(self, node, state):\n build_manager = self.build_manager\n if state.check_split:\n state.check_split = False\n check_actual = True\n if node.isBatch() and state.check_actual:\n vfile = self.vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n build_manager.actualNode(node)\n return True\n check_actual = False\n split_nodes = node.buildSplit()\n if split_nodes:\n state.split_nodes = split_nodes\n for split_node in split_nodes:\n split_state = self._getNodeState(split_node)\n split_state.check_split = False\n split_state.check_depends = False\n split_state.check_replace = False\n split_state.check_actual = check_actual\n split_state.initialized = (split_node.builder is node.\n 
builder)\n self.build_manager.depends(node, split_nodes)\n return True\n elif state.split_nodes is not None:\n if node.isBatch():\n node._populateTargets()\n else:\n targets = []\n for split_node in state.split_nodes:\n targets += split_node.getTargetValues()\n node.target_values = targets\n self._removeNodeState(node)\n self.build_manager.completedSplitNode(node)\n return True\n return False\n\n def _prebuild(self, node, state):\n if not state.initialized:\n node.initiate()\n state.initialized = True\n if state.check_depends:\n state.check_depends = False\n if self._checkPrebuildDepends(node):\n return True\n if state.check_replace:\n state.check_replace = False\n if self._checkPrebuildReplace(node):\n return True\n if self._checkPrebuildSplit(node, state):\n return True\n return False\n\n def build(self, nodes):\n build_manager = self.build_manager\n vfiles = self.vfiles\n addTask = self.task_manager.addTask\n tasks_check_period = 10\n added_tasks = 0\n changed = False\n for node in nodes:\n node_state = self._getNodeState(node)\n if self._prebuild(node, node_state):\n changed = True\n continue\n if self._addBuildingNode(node, node_state):\n continue\n if node_state.check_actual:\n vfile = vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n build_manager.actualNode(node)\n changed = True\n continue\n addTask(node, _buildNode, node)\n added_tasks += 1\n if added_tasks == tasks_check_period:\n changed = self._getFinishedNodes(block=False) or changed\n added_tasks = 0\n self._getFinishedNodes(block=not changed)\n\n def _getFinishedNodes(self, block=True):\n finished_tasks = self.task_manager.finishedTasks(block=block)\n vfiles = self.vfiles\n build_manager = self.build_manager\n for task in finished_tasks:\n node = task.task_id\n error = task.error\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n vfile = vfiles[node.builder]\n if error is None:\n node.save(vfile)\n build_manager.completedNode(node, task.result)\n else:\n if node.isBatch():\n node.save(vfile)\n build_manager.failedNode(node, error)\n return bool(finished_tasks)\n\n def clear(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n node.clear(vfile)\n build_manager.removedNode(node)\n\n def status(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n if build_manager.isActualNode(node, vfile):\n build_manager.actualNodeStatus(node)\n else:\n build_manager.outdatedNodeStatus(node)\n\n def close(self):\n try:\n self.task_manager.stop()\n self._getFinishedNodes(block=False)\n finally:\n self.vfiles.close()\n\n\nclass BuildManager(object):\n __slots__ = ('_nodes', '_built_targets', '_failed_nodes',\n '_built_node_names', 'completed', 'actual', 'explain')\n\n def __init__(self):\n self._nodes = _NodesTree()\n self.__reset()\n\n def __reset(self, build_always=False, explain=False):\n self._built_targets = {}\n self._failed_nodes = {}\n self._built_node_names = set() if build_always else None\n self.completed = 0\n self.actual = 0\n self.explain = explain\n\n def add(self, nodes):\n self._nodes.add(nodes)\n\n def depends(self, node, 
deps):\n self._nodes.depends(node, deps)\n\n def __len__(self):\n return len(self._nodes)\n\n def selfTest(self):\n self._nodes.selfTest()\n\n def getTailNodes(self):\n return self._nodes.popTails()\n\n def actualNodeStatus(self, node):\n eventNodeActual(node, self.getProgressStr())\n self.actualNode(node)\n\n def outdatedNodeStatus(self, node):\n self._failed_nodes[node] = None\n eventNodeOutdated(node, self.getProgressStr())\n node.shrink()\n\n def isActualNode(self, node, vfile):\n return node.checkActual(vfile, self._built_node_names, self.explain)\n\n def _addToBuiltNodeNames(self, node):\n built_names = self._built_node_names\n if built_names is not None:\n built_names.update(node.getNames())\n\n def completedSplitNode(self, node):\n self._nodes.removeTail(node)\n node.shrink()\n\n def actualNode(self, node):\n self._nodes.removeTail(node)\n self.actual += 1\n node.shrink()\n\n def completedNode(self, node, builder_output):\n self._checkAlreadyBuilt(node)\n self._nodes.removeTail(node)\n self._addToBuiltNodeNames(node)\n self.completed += 1\n eventNodeBuildingFinished(node, builder_output, self.getProgressStr())\n node.shrink()\n\n def failedNode(self, node, error):\n self._failed_nodes[node] = error\n eventNodeBuildingFailed(node, error)\n\n def removedNode(self, node):\n self._nodes.removeTail(node)\n self.completed += 1\n eventNodeRemoved(node, self.getProgressStr())\n node.shrink()\n\n def getProgressStr(self):\n done = self.completed + self.actual\n total = len(self._nodes) + done\n processed = done + len(self._failed_nodes)\n progress = '%s/%s' % (processed, total)\n return progress\n\n def close(self):\n self._nodes = _NodesTree()\n\n def _checkAlreadyBuilt(self, node):\n values = node.getTargetValues()\n built_targets = self._built_targets\n for value in values:\n value_sign = value.signature\n other_value_sign = built_targets.setdefault(value.valueId(),\n value_sign)\n if other_value_sign != value_sign:\n eventBuildTargetTwice(value, node)\n\n def build(self, jobs, keep_going, nodes=None, build_always=False,\n explain=False, with_backtrace=True):\n self.__reset(build_always=build_always, explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self, jobs, keep_going, with_backtrace\n ) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails and not nodes_builder.isBuilding():\n break\n nodes_builder.build(tails)\n return self.isOk()\n\n def isOk(self):\n return not bool(self._failed_nodes)\n\n def failsCount(self):\n return len(self._failed_nodes)\n\n def printFails(self):\n for node, error in self._failed_nodes.items():\n eventFailedNode(node, error)\n\n def printBuildState(self):\n logInfo('Failed nodes: %s' % len(self._failed_nodes))\n logInfo('Completed nodes: %s' % self.completed)\n logInfo('Actual nodes: %s' % self.actual)\n\n def printStatusState(self):\n logInfo('Outdated nodes: %s' % len(self._failed_nodes))\n logInfo('Actual nodes: %s' % self.actual)\n\n def clear(self, nodes=None):\n self.__reset()\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.clear(tails)\n\n def status(self, nodes=None, explain=False):\n self.__reset(explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n 
break\n nodes_builder.status(tails)\n return self.isOk()\n",
"step-2": "<mask token>\n\n\nclass InternalErrorRemoveNonTailNode(AqlException):\n <mask token>\n\n\nclass InternalErrorRemoveUnknownTailNode(AqlException):\n\n def __init__(self, node):\n msg = 'Remove unknown tail node: : %s' % (node,)\n super(InternalErrorRemoveUnknownTailNode, self).__init__(msg)\n\n\nclass BuildStat(object):\n __slots__ = 'total', 'completed', 'failed'\n\n def __init__(self, total):\n self.total = total\n self.completed = 0\n self.failed = 0\n\n def addTotal(self, count):\n self.total += count\n\n def incCompleted(self):\n self.completed += 1\n\n def incFailed(self):\n self.failed += 1\n\n def getProgressStr(self):\n progress = '%s/%s' % (self.completed + self.failed, self.total)\n return progress\n\n\nclass _NodesTree(object):\n __slots__ = 'node2deps', 'dep2nodes', 'tail_nodes'\n\n def __init__(self):\n self.node2deps = {}\n self.dep2nodes = {}\n self.tail_nodes = set()\n\n def __len__(self):\n return len(self.node2deps)\n\n def __hasCycle(self, node, new_deps):\n if node in new_deps:\n return True\n deps = set(new_deps)\n node2deps = self.node2deps\n while deps:\n dep = deps.pop()\n dep_deps = node2deps[dep]\n if node in dep_deps:\n return True\n deps |= dep_deps\n return False\n\n def __depends(self, node, deps):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n try:\n current_node_deps = node2deps[node]\n deps = {dep for dep in deps if not dep.isBuilt()}\n new_deps = deps - current_node_deps\n if not new_deps:\n return\n if self.__hasCycle(node, new_deps):\n raise ErrorNodeDependencyCyclic(node, new_deps)\n self.tail_nodes.discard(node)\n current_node_deps.update(new_deps)\n for dep in new_deps:\n dep2nodes[dep].add(node)\n except KeyError as dep_node:\n raise ErrorNodeDependencyUnknown(node, dep_node.args[0])\n\n def __add(self, nodes):\n for node in nodes:\n if node not in self.node2deps:\n self.node2deps[node] = set()\n self.dep2nodes[node] = set()\n self.tail_nodes.add(node)\n node_srcnodes = node.getSourceNodes()\n node_depnodes = node.getDepNodes()\n self.__add(node_srcnodes)\n self.__add(node_depnodes)\n self.__depends(node, node_srcnodes)\n self.__depends(node, node_depnodes)\n\n def add(self, nodes):\n self.__add(toSequence(nodes))\n\n def depends(self, node, deps):\n deps = toSequence(deps)\n self.__add(deps)\n self.__depends(node, deps)\n\n def removeTail(self, node):\n node2deps = self.node2deps\n try:\n deps = node2deps.pop(node)\n if deps:\n raise InternalErrorRemoveNonTailNode(node)\n except KeyError as node:\n raise InternalErrorRemoveUnknownTailNode(node.args[0])\n tail_nodes = self.tail_nodes\n for dep in self.dep2nodes.pop(node):\n d = node2deps[dep]\n d.remove(node)\n if not d:\n tail_nodes.add(dep)\n\n def popTails(self):\n tails = self.tail_nodes\n self.tail_nodes = set()\n return tails\n\n def __getAllNodes(self, nodes):\n nodes = set(toSequence(nodes))\n all_nodes = set(nodes)\n node2deps = self.node2deps\n while nodes:\n node = nodes.pop()\n try:\n deps = node2deps[node] - all_nodes\n except KeyError as node:\n raise ErrorNodeUnknown(node.args[0])\n all_nodes.update(deps)\n nodes.update(deps)\n return all_nodes\n\n def shrinkTo(self, nodes):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)\n self.tail_nodes -= ignore_nodes\n for node in ignore_nodes:\n del node2deps[node]\n del dep2nodes[node]\n for dep_nodes in dep2nodes.values():\n dep_nodes.difference_update(ignore_nodes)\n\n def selfTest(self):\n if set(self.node2deps) != set(self.dep2nodes):\n raise 
AssertionError('Not all deps are added')\n all_dep_nodes = set()\n for node in self.dep2nodes:\n if node not in self.node2deps:\n raise AssertionError('Missed node: %s' % (node,))\n node_deps = self.node2deps[node]\n if not node_deps:\n if node not in self.tail_nodes:\n raise AssertionError(\n 'Missed tail node: %s, tail_nodes: %s' % (node,\n self.tail_nodes))\n elif node in self.tail_nodes:\n raise AssertionError('Invalid tail node: %s' % (node,))\n all_dep_nodes |= node_deps\n for dep in node_deps:\n if node not in self.dep2nodes[dep]:\n raise AssertionError(\n 'node not in self.dep2nodes[dep]: dep: %s, node: %s' %\n (dep, node))\n if all_dep_nodes - set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n\n\nclass _VFiles(object):\n __slots__ = 'names', 'handles'\n\n def __init__(self):\n self.handles = {}\n self.names = {}\n\n def __iter__(self):\n raise TypeError()\n\n def __getitem__(self, builder):\n builder_name = builder.name\n try:\n vfilename = self.names[builder_name]\n except KeyError:\n vfilename = os.path.join(builder.getBuildDir(), '.aql.db')\n self.names[builder_name] = vfilename\n try:\n return self.handles[vfilename]\n except KeyError:\n vfile = ValuesFile(vfilename)\n self.handles[vfilename] = vfile\n return vfile\n\n def close(self):\n for vfile in self.handles.values():\n vfile.close()\n self.handles.clear()\n self.names.clear()\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n\n<mask token>\n\n\nclass _NodeState(object):\n __slots__ = ('initialized', 'check_depends', 'check_replace',\n 'check_split', 'check_actual', 'split_nodes')\n\n def __init__(self):\n self.initialized = False\n self.check_depends = True\n self.check_replace = True\n self.check_split = True\n self.check_actual = True\n self.split_nodes = None\n\n def __str__(self):\n return (\n 'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'\n % (self.initialized, self.check_depends, self.check_replace,\n self.check_split, self.check_actual, self.split_nodes))\n\n\nclass _NodesBuilder(object):\n __slots__ = ('vfiles', 'build_manager', 'task_manager', 'node_states',\n 'building_nodes')\n\n def __init__(self, build_manager, jobs=0, keep_going=False,\n with_backtrace=True):\n self.vfiles = _VFiles()\n self.node_states = {}\n self.building_nodes = {}\n self.build_manager = build_manager\n self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not\n keep_going, with_backtrace=with_backtrace)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n def _getNodeState(self, node):\n try:\n state = self.node_states[node]\n except KeyError:\n state = _NodeState()\n self.node_states[node] = state\n return state\n\n def _removeNodeState(self, node):\n try:\n del self.node_states[node]\n except KeyError:\n pass\n\n def _addBuildingNode(self, node, state):\n conflicting_nodes = []\n building_nodes = self.building_nodes\n for name, signature in node.getNamesAndSignatures():\n node_signature = node, signature\n other_node, other_signature = building_nodes.setdefault(name,\n node_signature)\n if other_node is not node:\n if other_signature != signature:\n raise ErrorNodeSignatureDifferent(node)\n conflicting_nodes.append(other_node)\n if conflicting_nodes:\n state.check_actual = True\n self.build_manager.depends(node, conflicting_nodes)\n return True\n return False\n\n def _removeBuildingNode(self, node):\n building_nodes = 
self.building_nodes\n for name in node.getNames():\n del building_nodes[name]\n\n def isBuilding(self):\n return bool(self.building_nodes)\n\n def _checkPrebuildDepends(self, node):\n dep_nodes = node.buildDepends()\n if dep_nodes:\n self.build_manager.depends(node, dep_nodes)\n return True\n return False\n\n def _checkPrebuildReplace(self, node):\n if node.buildReplace():\n new_node_sources = node.getSourceNodes()\n if new_node_sources:\n self.build_manager.depends(node, new_node_sources)\n return True\n return False\n\n def _checkPrebuildSplit(self, node, state):\n build_manager = self.build_manager\n if state.check_split:\n state.check_split = False\n check_actual = True\n if node.isBatch() and state.check_actual:\n vfile = self.vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n build_manager.actualNode(node)\n return True\n check_actual = False\n split_nodes = node.buildSplit()\n if split_nodes:\n state.split_nodes = split_nodes\n for split_node in split_nodes:\n split_state = self._getNodeState(split_node)\n split_state.check_split = False\n split_state.check_depends = False\n split_state.check_replace = False\n split_state.check_actual = check_actual\n split_state.initialized = (split_node.builder is node.\n builder)\n self.build_manager.depends(node, split_nodes)\n return True\n elif state.split_nodes is not None:\n if node.isBatch():\n node._populateTargets()\n else:\n targets = []\n for split_node in state.split_nodes:\n targets += split_node.getTargetValues()\n node.target_values = targets\n self._removeNodeState(node)\n self.build_manager.completedSplitNode(node)\n return True\n return False\n\n def _prebuild(self, node, state):\n if not state.initialized:\n node.initiate()\n state.initialized = True\n if state.check_depends:\n state.check_depends = False\n if self._checkPrebuildDepends(node):\n return True\n if state.check_replace:\n state.check_replace = False\n if self._checkPrebuildReplace(node):\n return True\n if self._checkPrebuildSplit(node, state):\n return True\n return False\n\n def build(self, nodes):\n build_manager = self.build_manager\n vfiles = self.vfiles\n addTask = self.task_manager.addTask\n tasks_check_period = 10\n added_tasks = 0\n changed = False\n for node in nodes:\n node_state = self._getNodeState(node)\n if self._prebuild(node, node_state):\n changed = True\n continue\n if self._addBuildingNode(node, node_state):\n continue\n if node_state.check_actual:\n vfile = vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n build_manager.actualNode(node)\n changed = True\n continue\n addTask(node, _buildNode, node)\n added_tasks += 1\n if added_tasks == tasks_check_period:\n changed = self._getFinishedNodes(block=False) or changed\n added_tasks = 0\n self._getFinishedNodes(block=not changed)\n\n def _getFinishedNodes(self, block=True):\n finished_tasks = self.task_manager.finishedTasks(block=block)\n vfiles = self.vfiles\n build_manager = self.build_manager\n for task in finished_tasks:\n node = task.task_id\n error = task.error\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n vfile = vfiles[node.builder]\n if error is None:\n node.save(vfile)\n build_manager.completedNode(node, task.result)\n else:\n if node.isBatch():\n node.save(vfile)\n build_manager.failedNode(node, error)\n return bool(finished_tasks)\n\n def clear(self, nodes):\n vfiles = self.vfiles\n build_manager = 
self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n node.clear(vfile)\n build_manager.removedNode(node)\n\n def status(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n if build_manager.isActualNode(node, vfile):\n build_manager.actualNodeStatus(node)\n else:\n build_manager.outdatedNodeStatus(node)\n\n def close(self):\n try:\n self.task_manager.stop()\n self._getFinishedNodes(block=False)\n finally:\n self.vfiles.close()\n\n\nclass BuildManager(object):\n __slots__ = ('_nodes', '_built_targets', '_failed_nodes',\n '_built_node_names', 'completed', 'actual', 'explain')\n\n def __init__(self):\n self._nodes = _NodesTree()\n self.__reset()\n\n def __reset(self, build_always=False, explain=False):\n self._built_targets = {}\n self._failed_nodes = {}\n self._built_node_names = set() if build_always else None\n self.completed = 0\n self.actual = 0\n self.explain = explain\n\n def add(self, nodes):\n self._nodes.add(nodes)\n\n def depends(self, node, deps):\n self._nodes.depends(node, deps)\n\n def __len__(self):\n return len(self._nodes)\n\n def selfTest(self):\n self._nodes.selfTest()\n\n def getTailNodes(self):\n return self._nodes.popTails()\n\n def actualNodeStatus(self, node):\n eventNodeActual(node, self.getProgressStr())\n self.actualNode(node)\n\n def outdatedNodeStatus(self, node):\n self._failed_nodes[node] = None\n eventNodeOutdated(node, self.getProgressStr())\n node.shrink()\n\n def isActualNode(self, node, vfile):\n return node.checkActual(vfile, self._built_node_names, self.explain)\n\n def _addToBuiltNodeNames(self, node):\n built_names = self._built_node_names\n if built_names is not None:\n built_names.update(node.getNames())\n\n def completedSplitNode(self, node):\n self._nodes.removeTail(node)\n node.shrink()\n\n def actualNode(self, node):\n self._nodes.removeTail(node)\n self.actual += 1\n node.shrink()\n\n def completedNode(self, node, builder_output):\n self._checkAlreadyBuilt(node)\n self._nodes.removeTail(node)\n self._addToBuiltNodeNames(node)\n self.completed += 1\n eventNodeBuildingFinished(node, builder_output, self.getProgressStr())\n node.shrink()\n\n def failedNode(self, node, error):\n self._failed_nodes[node] = error\n eventNodeBuildingFailed(node, error)\n\n def removedNode(self, node):\n self._nodes.removeTail(node)\n self.completed += 1\n eventNodeRemoved(node, self.getProgressStr())\n node.shrink()\n\n def getProgressStr(self):\n done = self.completed + self.actual\n total = len(self._nodes) + done\n processed = done + len(self._failed_nodes)\n progress = '%s/%s' % (processed, total)\n return progress\n\n def close(self):\n self._nodes = _NodesTree()\n\n def _checkAlreadyBuilt(self, node):\n values = node.getTargetValues()\n built_targets = self._built_targets\n for value in values:\n value_sign = value.signature\n other_value_sign = built_targets.setdefault(value.valueId(),\n value_sign)\n if other_value_sign != value_sign:\n eventBuildTargetTwice(value, node)\n\n def build(self, jobs, keep_going, nodes=None, build_always=False,\n explain=False, with_backtrace=True):\n self.__reset(build_always=build_always, explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with 
_NodesBuilder(self, jobs, keep_going, with_backtrace\n ) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails and not nodes_builder.isBuilding():\n break\n nodes_builder.build(tails)\n return self.isOk()\n\n def isOk(self):\n return not bool(self._failed_nodes)\n\n def failsCount(self):\n return len(self._failed_nodes)\n\n def printFails(self):\n for node, error in self._failed_nodes.items():\n eventFailedNode(node, error)\n\n def printBuildState(self):\n logInfo('Failed nodes: %s' % len(self._failed_nodes))\n logInfo('Completed nodes: %s' % self.completed)\n logInfo('Actual nodes: %s' % self.actual)\n\n def printStatusState(self):\n logInfo('Outdated nodes: %s' % len(self._failed_nodes))\n logInfo('Actual nodes: %s' % self.actual)\n\n def clear(self, nodes=None):\n self.__reset()\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.clear(tails)\n\n def status(self, nodes=None, explain=False):\n self.__reset(explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.status(tails)\n return self.isOk()\n",
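The __hasCycle walk inside _NodesTree.__depends above rejects any dependency edge that would make the graph cyclic: it follows the existing node2deps edges transitively from the candidate dependencies and fails if it ever reaches the depending node itself. Below is a minimal, runnable sketch of that check using plain dicts and sets; MiniTree and the sample node names are illustrative stand-ins, not Aqualid API.

# Minimal runnable sketch of the transitive cycle check behind
# _NodesTree.__hasCycle; MiniTree and the node names are illustrative.

class MiniTree:

    def __init__(self):
        self.node2deps = {}  # node -> set of direct dependencies

    def add(self, node):
        self.node2deps.setdefault(node, set())

    def has_cycle(self, node, new_deps):
        # Adding new_deps to node closes a cycle iff node is reachable
        # from any new dependency through the existing edges.
        if node in new_deps:
            return True
        deps = set(new_deps)
        while deps:
            dep = deps.pop()
            dep_deps = self.node2deps[dep]
            if node in dep_deps:
                return True
            deps |= dep_deps
        return False

    def depends(self, node, deps):
        if self.has_cycle(node, deps):
            raise ValueError('cyclic dependency: %s -> %s' % (node, deps))
        self.node2deps[node].update(deps)


tree = MiniTree()
for name in ('a', 'b', 'c'):
    tree.add(name)
tree.depends('a', {'b'})
tree.depends('b', {'c'})
try:
    tree.depends('c', {'a'})  # a -> b -> c -> a would be cyclic
except ValueError as error:
    print(error)

As in the original, the walk assumes the already-stored edges are acyclic, which holds because every earlier insertion passed the same check.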
"step-3": "<mask token>\n\n\nclass ErrorNodeSignatureDifferent(AqlException):\n <mask token>\n\n\nclass ErrorNodeDependencyUnknown(AqlException):\n\n def __init__(self, node, dep_node):\n msg = \"Unable to add dependency to node '%s' from node '%s'\" % (node,\n dep_node)\n super(ErrorNodeDependencyUnknown, self).__init__(msg)\n\n\nclass InternalErrorRemoveNonTailNode(AqlException):\n\n def __init__(self, node):\n msg = 'Removing non-tail node: %s' % (node,)\n super(InternalErrorRemoveNonTailNode, self).__init__(msg)\n\n\nclass InternalErrorRemoveUnknownTailNode(AqlException):\n\n def __init__(self, node):\n msg = 'Remove unknown tail node: : %s' % (node,)\n super(InternalErrorRemoveUnknownTailNode, self).__init__(msg)\n\n\nclass BuildStat(object):\n __slots__ = 'total', 'completed', 'failed'\n\n def __init__(self, total):\n self.total = total\n self.completed = 0\n self.failed = 0\n\n def addTotal(self, count):\n self.total += count\n\n def incCompleted(self):\n self.completed += 1\n\n def incFailed(self):\n self.failed += 1\n\n def getProgressStr(self):\n progress = '%s/%s' % (self.completed + self.failed, self.total)\n return progress\n\n\nclass _NodesTree(object):\n __slots__ = 'node2deps', 'dep2nodes', 'tail_nodes'\n\n def __init__(self):\n self.node2deps = {}\n self.dep2nodes = {}\n self.tail_nodes = set()\n\n def __len__(self):\n return len(self.node2deps)\n\n def __hasCycle(self, node, new_deps):\n if node in new_deps:\n return True\n deps = set(new_deps)\n node2deps = self.node2deps\n while deps:\n dep = deps.pop()\n dep_deps = node2deps[dep]\n if node in dep_deps:\n return True\n deps |= dep_deps\n return False\n\n def __depends(self, node, deps):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n try:\n current_node_deps = node2deps[node]\n deps = {dep for dep in deps if not dep.isBuilt()}\n new_deps = deps - current_node_deps\n if not new_deps:\n return\n if self.__hasCycle(node, new_deps):\n raise ErrorNodeDependencyCyclic(node, new_deps)\n self.tail_nodes.discard(node)\n current_node_deps.update(new_deps)\n for dep in new_deps:\n dep2nodes[dep].add(node)\n except KeyError as dep_node:\n raise ErrorNodeDependencyUnknown(node, dep_node.args[0])\n\n def __add(self, nodes):\n for node in nodes:\n if node not in self.node2deps:\n self.node2deps[node] = set()\n self.dep2nodes[node] = set()\n self.tail_nodes.add(node)\n node_srcnodes = node.getSourceNodes()\n node_depnodes = node.getDepNodes()\n self.__add(node_srcnodes)\n self.__add(node_depnodes)\n self.__depends(node, node_srcnodes)\n self.__depends(node, node_depnodes)\n\n def add(self, nodes):\n self.__add(toSequence(nodes))\n\n def depends(self, node, deps):\n deps = toSequence(deps)\n self.__add(deps)\n self.__depends(node, deps)\n\n def removeTail(self, node):\n node2deps = self.node2deps\n try:\n deps = node2deps.pop(node)\n if deps:\n raise InternalErrorRemoveNonTailNode(node)\n except KeyError as node:\n raise InternalErrorRemoveUnknownTailNode(node.args[0])\n tail_nodes = self.tail_nodes\n for dep in self.dep2nodes.pop(node):\n d = node2deps[dep]\n d.remove(node)\n if not d:\n tail_nodes.add(dep)\n\n def popTails(self):\n tails = self.tail_nodes\n self.tail_nodes = set()\n return tails\n\n def __getAllNodes(self, nodes):\n nodes = set(toSequence(nodes))\n all_nodes = set(nodes)\n node2deps = self.node2deps\n while nodes:\n node = nodes.pop()\n try:\n deps = node2deps[node] - all_nodes\n except KeyError as node:\n raise ErrorNodeUnknown(node.args[0])\n all_nodes.update(deps)\n nodes.update(deps)\n return 
all_nodes\n\n def shrinkTo(self, nodes):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)\n self.tail_nodes -= ignore_nodes\n for node in ignore_nodes:\n del node2deps[node]\n del dep2nodes[node]\n for dep_nodes in dep2nodes.values():\n dep_nodes.difference_update(ignore_nodes)\n\n def selfTest(self):\n if set(self.node2deps) != set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n all_dep_nodes = set()\n for node in self.dep2nodes:\n if node not in self.node2deps:\n raise AssertionError('Missed node: %s' % (node,))\n node_deps = self.node2deps[node]\n if not node_deps:\n if node not in self.tail_nodes:\n raise AssertionError(\n 'Missed tail node: %s, tail_nodes: %s' % (node,\n self.tail_nodes))\n elif node in self.tail_nodes:\n raise AssertionError('Invalid tail node: %s' % (node,))\n all_dep_nodes |= node_deps\n for dep in node_deps:\n if node not in self.dep2nodes[dep]:\n raise AssertionError(\n 'node not in self.dep2nodes[dep]: dep: %s, node: %s' %\n (dep, node))\n if all_dep_nodes - set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n\n\nclass _VFiles(object):\n __slots__ = 'names', 'handles'\n\n def __init__(self):\n self.handles = {}\n self.names = {}\n\n def __iter__(self):\n raise TypeError()\n\n def __getitem__(self, builder):\n builder_name = builder.name\n try:\n vfilename = self.names[builder_name]\n except KeyError:\n vfilename = os.path.join(builder.getBuildDir(), '.aql.db')\n self.names[builder_name] = vfilename\n try:\n return self.handles[vfilename]\n except KeyError:\n vfile = ValuesFile(vfilename)\n self.handles[vfilename] = vfile\n return vfile\n\n def close(self):\n for vfile in self.handles.values():\n vfile.close()\n self.handles.clear()\n self.names.clear()\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n\n<mask token>\n\n\nclass _NodeState(object):\n __slots__ = ('initialized', 'check_depends', 'check_replace',\n 'check_split', 'check_actual', 'split_nodes')\n\n def __init__(self):\n self.initialized = False\n self.check_depends = True\n self.check_replace = True\n self.check_split = True\n self.check_actual = True\n self.split_nodes = None\n\n def __str__(self):\n return (\n 'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'\n % (self.initialized, self.check_depends, self.check_replace,\n self.check_split, self.check_actual, self.split_nodes))\n\n\nclass _NodesBuilder(object):\n __slots__ = ('vfiles', 'build_manager', 'task_manager', 'node_states',\n 'building_nodes')\n\n def __init__(self, build_manager, jobs=0, keep_going=False,\n with_backtrace=True):\n self.vfiles = _VFiles()\n self.node_states = {}\n self.building_nodes = {}\n self.build_manager = build_manager\n self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not\n keep_going, with_backtrace=with_backtrace)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n def _getNodeState(self, node):\n try:\n state = self.node_states[node]\n except KeyError:\n state = _NodeState()\n self.node_states[node] = state\n return state\n\n def _removeNodeState(self, node):\n try:\n del self.node_states[node]\n except KeyError:\n pass\n\n def _addBuildingNode(self, node, state):\n conflicting_nodes = []\n building_nodes = self.building_nodes\n for name, signature in node.getNamesAndSignatures():\n node_signature = node, 
signature\n other_node, other_signature = building_nodes.setdefault(name,\n node_signature)\n if other_node is not node:\n if other_signature != signature:\n raise ErrorNodeSignatureDifferent(node)\n conflicting_nodes.append(other_node)\n if conflicting_nodes:\n state.check_actual = True\n self.build_manager.depends(node, conflicting_nodes)\n return True\n return False\n\n def _removeBuildingNode(self, node):\n building_nodes = self.building_nodes\n for name in node.getNames():\n del building_nodes[name]\n\n def isBuilding(self):\n return bool(self.building_nodes)\n\n def _checkPrebuildDepends(self, node):\n dep_nodes = node.buildDepends()\n if dep_nodes:\n self.build_manager.depends(node, dep_nodes)\n return True\n return False\n\n def _checkPrebuildReplace(self, node):\n if node.buildReplace():\n new_node_sources = node.getSourceNodes()\n if new_node_sources:\n self.build_manager.depends(node, new_node_sources)\n return True\n return False\n\n def _checkPrebuildSplit(self, node, state):\n build_manager = self.build_manager\n if state.check_split:\n state.check_split = False\n check_actual = True\n if node.isBatch() and state.check_actual:\n vfile = self.vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n build_manager.actualNode(node)\n return True\n check_actual = False\n split_nodes = node.buildSplit()\n if split_nodes:\n state.split_nodes = split_nodes\n for split_node in split_nodes:\n split_state = self._getNodeState(split_node)\n split_state.check_split = False\n split_state.check_depends = False\n split_state.check_replace = False\n split_state.check_actual = check_actual\n split_state.initialized = (split_node.builder is node.\n builder)\n self.build_manager.depends(node, split_nodes)\n return True\n elif state.split_nodes is not None:\n if node.isBatch():\n node._populateTargets()\n else:\n targets = []\n for split_node in state.split_nodes:\n targets += split_node.getTargetValues()\n node.target_values = targets\n self._removeNodeState(node)\n self.build_manager.completedSplitNode(node)\n return True\n return False\n\n def _prebuild(self, node, state):\n if not state.initialized:\n node.initiate()\n state.initialized = True\n if state.check_depends:\n state.check_depends = False\n if self._checkPrebuildDepends(node):\n return True\n if state.check_replace:\n state.check_replace = False\n if self._checkPrebuildReplace(node):\n return True\n if self._checkPrebuildSplit(node, state):\n return True\n return False\n\n def build(self, nodes):\n build_manager = self.build_manager\n vfiles = self.vfiles\n addTask = self.task_manager.addTask\n tasks_check_period = 10\n added_tasks = 0\n changed = False\n for node in nodes:\n node_state = self._getNodeState(node)\n if self._prebuild(node, node_state):\n changed = True\n continue\n if self._addBuildingNode(node, node_state):\n continue\n if node_state.check_actual:\n vfile = vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n build_manager.actualNode(node)\n changed = True\n continue\n addTask(node, _buildNode, node)\n added_tasks += 1\n if added_tasks == tasks_check_period:\n changed = self._getFinishedNodes(block=False) or changed\n added_tasks = 0\n self._getFinishedNodes(block=not changed)\n\n def _getFinishedNodes(self, block=True):\n finished_tasks = self.task_manager.finishedTasks(block=block)\n vfiles = self.vfiles\n build_manager = self.build_manager\n for task in 
finished_tasks:\n node = task.task_id\n error = task.error\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n vfile = vfiles[node.builder]\n if error is None:\n node.save(vfile)\n build_manager.completedNode(node, task.result)\n else:\n if node.isBatch():\n node.save(vfile)\n build_manager.failedNode(node, error)\n return bool(finished_tasks)\n\n def clear(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n node.clear(vfile)\n build_manager.removedNode(node)\n\n def status(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n if build_manager.isActualNode(node, vfile):\n build_manager.actualNodeStatus(node)\n else:\n build_manager.outdatedNodeStatus(node)\n\n def close(self):\n try:\n self.task_manager.stop()\n self._getFinishedNodes(block=False)\n finally:\n self.vfiles.close()\n\n\nclass BuildManager(object):\n __slots__ = ('_nodes', '_built_targets', '_failed_nodes',\n '_built_node_names', 'completed', 'actual', 'explain')\n\n def __init__(self):\n self._nodes = _NodesTree()\n self.__reset()\n\n def __reset(self, build_always=False, explain=False):\n self._built_targets = {}\n self._failed_nodes = {}\n self._built_node_names = set() if build_always else None\n self.completed = 0\n self.actual = 0\n self.explain = explain\n\n def add(self, nodes):\n self._nodes.add(nodes)\n\n def depends(self, node, deps):\n self._nodes.depends(node, deps)\n\n def __len__(self):\n return len(self._nodes)\n\n def selfTest(self):\n self._nodes.selfTest()\n\n def getTailNodes(self):\n return self._nodes.popTails()\n\n def actualNodeStatus(self, node):\n eventNodeActual(node, self.getProgressStr())\n self.actualNode(node)\n\n def outdatedNodeStatus(self, node):\n self._failed_nodes[node] = None\n eventNodeOutdated(node, self.getProgressStr())\n node.shrink()\n\n def isActualNode(self, node, vfile):\n return node.checkActual(vfile, self._built_node_names, self.explain)\n\n def _addToBuiltNodeNames(self, node):\n built_names = self._built_node_names\n if built_names is not None:\n built_names.update(node.getNames())\n\n def completedSplitNode(self, node):\n self._nodes.removeTail(node)\n node.shrink()\n\n def actualNode(self, node):\n self._nodes.removeTail(node)\n self.actual += 1\n node.shrink()\n\n def completedNode(self, node, builder_output):\n self._checkAlreadyBuilt(node)\n self._nodes.removeTail(node)\n self._addToBuiltNodeNames(node)\n self.completed += 1\n eventNodeBuildingFinished(node, builder_output, self.getProgressStr())\n node.shrink()\n\n def failedNode(self, node, error):\n self._failed_nodes[node] = error\n eventNodeBuildingFailed(node, error)\n\n def removedNode(self, node):\n self._nodes.removeTail(node)\n self.completed += 1\n eventNodeRemoved(node, self.getProgressStr())\n node.shrink()\n\n def getProgressStr(self):\n done = self.completed + self.actual\n total = len(self._nodes) + done\n processed = done + len(self._failed_nodes)\n progress = '%s/%s' % (processed, total)\n return progress\n\n def close(self):\n self._nodes = _NodesTree()\n\n def _checkAlreadyBuilt(self, node):\n values = node.getTargetValues()\n built_targets = self._built_targets\n for value in values:\n value_sign 
= value.signature\n other_value_sign = built_targets.setdefault(value.valueId(),\n value_sign)\n if other_value_sign != value_sign:\n eventBuildTargetTwice(value, node)\n\n def build(self, jobs, keep_going, nodes=None, build_always=False,\n explain=False, with_backtrace=True):\n self.__reset(build_always=build_always, explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self, jobs, keep_going, with_backtrace\n ) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails and not nodes_builder.isBuilding():\n break\n nodes_builder.build(tails)\n return self.isOk()\n\n def isOk(self):\n return not bool(self._failed_nodes)\n\n def failsCount(self):\n return len(self._failed_nodes)\n\n def printFails(self):\n for node, error in self._failed_nodes.items():\n eventFailedNode(node, error)\n\n def printBuildState(self):\n logInfo('Failed nodes: %s' % len(self._failed_nodes))\n logInfo('Completed nodes: %s' % self.completed)\n logInfo('Actual nodes: %s' % self.actual)\n\n def printStatusState(self):\n logInfo('Outdated nodes: %s' % len(self._failed_nodes))\n logInfo('Actual nodes: %s' % self.actual)\n\n def clear(self, nodes=None):\n self.__reset()\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.clear(tails)\n\n def status(self, nodes=None, explain=False):\n self.__reset(explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.status(tails)\n return self.isOk()\n",
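popTails and removeTail together drive the build loop in the code above: a node with no pending dependencies is a "tail" and may be built immediately, and completing it removes it from its dependants' dependency sets, which may expose new tails. The following self-contained sketch reimplements that topological scheduling for an acyclic node2deps mapping; build_order and the sample targets are hypothetical, not Aqualid API.

# Sketch of the tail-driven scheduling that popTails/removeTail implement:
# dependency-free nodes build first; finishing one may free its dependants.

def build_order(node2deps):
    node2deps = {node: set(deps) for node, deps in node2deps.items()}
    dep2nodes = {node: set() for node in node2deps}  # reverse edges
    for node, deps in node2deps.items():
        for dep in deps:
            dep2nodes[dep].add(node)

    tails = {node for node, deps in node2deps.items() if not deps}
    order = []
    while tails:
        node = tails.pop()
        order.append(node)
        for dependant in dep2nodes[node]:
            remaining = node2deps[dependant]
            remaining.discard(node)
            if not remaining:  # dependant just became a tail
                tails.add(dependant)
    return order


deps = {'app': {'lib', 'cfg'}, 'lib': {'cfg'}, 'cfg': set()}
print(build_order(deps))  # ['cfg', 'lib', 'app']

The real manager interleaves this with a task pool (tails are built concurrently, and getTailNodes is re-queried after each batch finishes), but the graph bookkeeping is the same.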
"step-4": "<mask token>\n\n\n@eventStatus\ndef eventNodeOutdated(settings, node, progress):\n msg = '(%s) OUTDATED: %s' % (progress, node.getBuildStr(settings.brief))\n logInfo(msg)\n\n\n<mask token>\n\n\n@eventStatus\ndef eventNodeRemoved(settings, node, progress):\n msg = node.getBuildStr(settings.brief)\n if msg:\n logInfo('(%s) Removed: %s' % (progress, msg))\n\n\nclass ErrorNodeDependencyCyclic(AqlException):\n\n def __init__(self, node, deps):\n msg = \"Node '%s' (%s) has a cyclic dependency: %s\" % (node, node.\n getBuildStr(True), deps)\n super(ErrorNodeDependencyCyclic, self).__init__(msg)\n\n\nclass ErrorNodeUnknown(AqlException):\n\n def __init__(self, node):\n msg = \"Unknown node '%s'\" % (node,)\n super(ErrorNodeUnknown, self).__init__(msg)\n\n\nclass ErrorNodeSignatureDifferent(AqlException):\n\n def __init__(self, node):\n msg = (\n 'Two similar nodes have different signatures (sources, builder parameters or dependencies): %s'\n % (node.getBuildStr(brief=False),))\n super(ErrorNodeSignatureDifferent, self).__init__(msg)\n\n\nclass ErrorNodeDependencyUnknown(AqlException):\n\n def __init__(self, node, dep_node):\n msg = \"Unable to add dependency to node '%s' from node '%s'\" % (node,\n dep_node)\n super(ErrorNodeDependencyUnknown, self).__init__(msg)\n\n\nclass InternalErrorRemoveNonTailNode(AqlException):\n\n def __init__(self, node):\n msg = 'Removing non-tail node: %s' % (node,)\n super(InternalErrorRemoveNonTailNode, self).__init__(msg)\n\n\nclass InternalErrorRemoveUnknownTailNode(AqlException):\n\n def __init__(self, node):\n msg = 'Remove unknown tail node: : %s' % (node,)\n super(InternalErrorRemoveUnknownTailNode, self).__init__(msg)\n\n\nclass BuildStat(object):\n __slots__ = 'total', 'completed', 'failed'\n\n def __init__(self, total):\n self.total = total\n self.completed = 0\n self.failed = 0\n\n def addTotal(self, count):\n self.total += count\n\n def incCompleted(self):\n self.completed += 1\n\n def incFailed(self):\n self.failed += 1\n\n def getProgressStr(self):\n progress = '%s/%s' % (self.completed + self.failed, self.total)\n return progress\n\n\nclass _NodesTree(object):\n __slots__ = 'node2deps', 'dep2nodes', 'tail_nodes'\n\n def __init__(self):\n self.node2deps = {}\n self.dep2nodes = {}\n self.tail_nodes = set()\n\n def __len__(self):\n return len(self.node2deps)\n\n def __hasCycle(self, node, new_deps):\n if node in new_deps:\n return True\n deps = set(new_deps)\n node2deps = self.node2deps\n while deps:\n dep = deps.pop()\n dep_deps = node2deps[dep]\n if node in dep_deps:\n return True\n deps |= dep_deps\n return False\n\n def __depends(self, node, deps):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n try:\n current_node_deps = node2deps[node]\n deps = {dep for dep in deps if not dep.isBuilt()}\n new_deps = deps - current_node_deps\n if not new_deps:\n return\n if self.__hasCycle(node, new_deps):\n raise ErrorNodeDependencyCyclic(node, new_deps)\n self.tail_nodes.discard(node)\n current_node_deps.update(new_deps)\n for dep in new_deps:\n dep2nodes[dep].add(node)\n except KeyError as dep_node:\n raise ErrorNodeDependencyUnknown(node, dep_node.args[0])\n\n def __add(self, nodes):\n for node in nodes:\n if node not in self.node2deps:\n self.node2deps[node] = set()\n self.dep2nodes[node] = set()\n self.tail_nodes.add(node)\n node_srcnodes = node.getSourceNodes()\n node_depnodes = node.getDepNodes()\n self.__add(node_srcnodes)\n self.__add(node_depnodes)\n self.__depends(node, node_srcnodes)\n self.__depends(node, node_depnodes)\n\n def 
add(self, nodes):\n self.__add(toSequence(nodes))\n\n def depends(self, node, deps):\n deps = toSequence(deps)\n self.__add(deps)\n self.__depends(node, deps)\n\n def removeTail(self, node):\n node2deps = self.node2deps\n try:\n deps = node2deps.pop(node)\n if deps:\n raise InternalErrorRemoveNonTailNode(node)\n except KeyError as node:\n raise InternalErrorRemoveUnknownTailNode(node.args[0])\n tail_nodes = self.tail_nodes\n for dep in self.dep2nodes.pop(node):\n d = node2deps[dep]\n d.remove(node)\n if not d:\n tail_nodes.add(dep)\n\n def popTails(self):\n tails = self.tail_nodes\n self.tail_nodes = set()\n return tails\n\n def __getAllNodes(self, nodes):\n nodes = set(toSequence(nodes))\n all_nodes = set(nodes)\n node2deps = self.node2deps\n while nodes:\n node = nodes.pop()\n try:\n deps = node2deps[node] - all_nodes\n except KeyError as node:\n raise ErrorNodeUnknown(node.args[0])\n all_nodes.update(deps)\n nodes.update(deps)\n return all_nodes\n\n def shrinkTo(self, nodes):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)\n self.tail_nodes -= ignore_nodes\n for node in ignore_nodes:\n del node2deps[node]\n del dep2nodes[node]\n for dep_nodes in dep2nodes.values():\n dep_nodes.difference_update(ignore_nodes)\n\n def selfTest(self):\n if set(self.node2deps) != set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n all_dep_nodes = set()\n for node in self.dep2nodes:\n if node not in self.node2deps:\n raise AssertionError('Missed node: %s' % (node,))\n node_deps = self.node2deps[node]\n if not node_deps:\n if node not in self.tail_nodes:\n raise AssertionError(\n 'Missed tail node: %s, tail_nodes: %s' % (node,\n self.tail_nodes))\n elif node in self.tail_nodes:\n raise AssertionError('Invalid tail node: %s' % (node,))\n all_dep_nodes |= node_deps\n for dep in node_deps:\n if node not in self.dep2nodes[dep]:\n raise AssertionError(\n 'node not in self.dep2nodes[dep]: dep: %s, node: %s' %\n (dep, node))\n if all_dep_nodes - set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n\n\nclass _VFiles(object):\n __slots__ = 'names', 'handles'\n\n def __init__(self):\n self.handles = {}\n self.names = {}\n\n def __iter__(self):\n raise TypeError()\n\n def __getitem__(self, builder):\n builder_name = builder.name\n try:\n vfilename = self.names[builder_name]\n except KeyError:\n vfilename = os.path.join(builder.getBuildDir(), '.aql.db')\n self.names[builder_name] = vfilename\n try:\n return self.handles[vfilename]\n except KeyError:\n vfile = ValuesFile(vfilename)\n self.handles[vfilename] = vfile\n return vfile\n\n def close(self):\n for vfile in self.handles.values():\n vfile.close()\n self.handles.clear()\n self.names.clear()\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n\n<mask token>\n\n\nclass _NodeState(object):\n __slots__ = ('initialized', 'check_depends', 'check_replace',\n 'check_split', 'check_actual', 'split_nodes')\n\n def __init__(self):\n self.initialized = False\n self.check_depends = True\n self.check_replace = True\n self.check_split = True\n self.check_actual = True\n self.split_nodes = None\n\n def __str__(self):\n return (\n 'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'\n % (self.initialized, self.check_depends, self.check_replace,\n self.check_split, self.check_actual, self.split_nodes))\n\n\nclass _NodesBuilder(object):\n __slots__ = ('vfiles', 
'build_manager', 'task_manager', 'node_states',\n 'building_nodes')\n\n def __init__(self, build_manager, jobs=0, keep_going=False,\n with_backtrace=True):\n self.vfiles = _VFiles()\n self.node_states = {}\n self.building_nodes = {}\n self.build_manager = build_manager\n self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not\n keep_going, with_backtrace=with_backtrace)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n def _getNodeState(self, node):\n try:\n state = self.node_states[node]\n except KeyError:\n state = _NodeState()\n self.node_states[node] = state\n return state\n\n def _removeNodeState(self, node):\n try:\n del self.node_states[node]\n except KeyError:\n pass\n\n def _addBuildingNode(self, node, state):\n conflicting_nodes = []\n building_nodes = self.building_nodes\n for name, signature in node.getNamesAndSignatures():\n node_signature = node, signature\n other_node, other_signature = building_nodes.setdefault(name,\n node_signature)\n if other_node is not node:\n if other_signature != signature:\n raise ErrorNodeSignatureDifferent(node)\n conflicting_nodes.append(other_node)\n if conflicting_nodes:\n state.check_actual = True\n self.build_manager.depends(node, conflicting_nodes)\n return True\n return False\n\n def _removeBuildingNode(self, node):\n building_nodes = self.building_nodes\n for name in node.getNames():\n del building_nodes[name]\n\n def isBuilding(self):\n return bool(self.building_nodes)\n\n def _checkPrebuildDepends(self, node):\n dep_nodes = node.buildDepends()\n if dep_nodes:\n self.build_manager.depends(node, dep_nodes)\n return True\n return False\n\n def _checkPrebuildReplace(self, node):\n if node.buildReplace():\n new_node_sources = node.getSourceNodes()\n if new_node_sources:\n self.build_manager.depends(node, new_node_sources)\n return True\n return False\n\n def _checkPrebuildSplit(self, node, state):\n build_manager = self.build_manager\n if state.check_split:\n state.check_split = False\n check_actual = True\n if node.isBatch() and state.check_actual:\n vfile = self.vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n build_manager.actualNode(node)\n return True\n check_actual = False\n split_nodes = node.buildSplit()\n if split_nodes:\n state.split_nodes = split_nodes\n for split_node in split_nodes:\n split_state = self._getNodeState(split_node)\n split_state.check_split = False\n split_state.check_depends = False\n split_state.check_replace = False\n split_state.check_actual = check_actual\n split_state.initialized = (split_node.builder is node.\n builder)\n self.build_manager.depends(node, split_nodes)\n return True\n elif state.split_nodes is not None:\n if node.isBatch():\n node._populateTargets()\n else:\n targets = []\n for split_node in state.split_nodes:\n targets += split_node.getTargetValues()\n node.target_values = targets\n self._removeNodeState(node)\n self.build_manager.completedSplitNode(node)\n return True\n return False\n\n def _prebuild(self, node, state):\n if not state.initialized:\n node.initiate()\n state.initialized = True\n if state.check_depends:\n state.check_depends = False\n if self._checkPrebuildDepends(node):\n return True\n if state.check_replace:\n state.check_replace = False\n if self._checkPrebuildReplace(node):\n return True\n if self._checkPrebuildSplit(node, state):\n return True\n return False\n\n def build(self, nodes):\n build_manager = self.build_manager\n vfiles = 
self.vfiles\n addTask = self.task_manager.addTask\n tasks_check_period = 10\n added_tasks = 0\n changed = False\n for node in nodes:\n node_state = self._getNodeState(node)\n if self._prebuild(node, node_state):\n changed = True\n continue\n if self._addBuildingNode(node, node_state):\n continue\n if node_state.check_actual:\n vfile = vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n build_manager.actualNode(node)\n changed = True\n continue\n addTask(node, _buildNode, node)\n added_tasks += 1\n if added_tasks == tasks_check_period:\n changed = self._getFinishedNodes(block=False) or changed\n added_tasks = 0\n self._getFinishedNodes(block=not changed)\n\n def _getFinishedNodes(self, block=True):\n finished_tasks = self.task_manager.finishedTasks(block=block)\n vfiles = self.vfiles\n build_manager = self.build_manager\n for task in finished_tasks:\n node = task.task_id\n error = task.error\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n vfile = vfiles[node.builder]\n if error is None:\n node.save(vfile)\n build_manager.completedNode(node, task.result)\n else:\n if node.isBatch():\n node.save(vfile)\n build_manager.failedNode(node, error)\n return bool(finished_tasks)\n\n def clear(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n node.clear(vfile)\n build_manager.removedNode(node)\n\n def status(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n if build_manager.isActualNode(node, vfile):\n build_manager.actualNodeStatus(node)\n else:\n build_manager.outdatedNodeStatus(node)\n\n def close(self):\n try:\n self.task_manager.stop()\n self._getFinishedNodes(block=False)\n finally:\n self.vfiles.close()\n\n\nclass BuildManager(object):\n __slots__ = ('_nodes', '_built_targets', '_failed_nodes',\n '_built_node_names', 'completed', 'actual', 'explain')\n\n def __init__(self):\n self._nodes = _NodesTree()\n self.__reset()\n\n def __reset(self, build_always=False, explain=False):\n self._built_targets = {}\n self._failed_nodes = {}\n self._built_node_names = set() if build_always else None\n self.completed = 0\n self.actual = 0\n self.explain = explain\n\n def add(self, nodes):\n self._nodes.add(nodes)\n\n def depends(self, node, deps):\n self._nodes.depends(node, deps)\n\n def __len__(self):\n return len(self._nodes)\n\n def selfTest(self):\n self._nodes.selfTest()\n\n def getTailNodes(self):\n return self._nodes.popTails()\n\n def actualNodeStatus(self, node):\n eventNodeActual(node, self.getProgressStr())\n self.actualNode(node)\n\n def outdatedNodeStatus(self, node):\n self._failed_nodes[node] = None\n eventNodeOutdated(node, self.getProgressStr())\n node.shrink()\n\n def isActualNode(self, node, vfile):\n return node.checkActual(vfile, self._built_node_names, self.explain)\n\n def _addToBuiltNodeNames(self, node):\n built_names = self._built_node_names\n if built_names is not None:\n built_names.update(node.getNames())\n\n def completedSplitNode(self, node):\n self._nodes.removeTail(node)\n node.shrink()\n\n def actualNode(self, node):\n self._nodes.removeTail(node)\n self.actual += 
1\n node.shrink()\n\n def completedNode(self, node, builder_output):\n self._checkAlreadyBuilt(node)\n self._nodes.removeTail(node)\n self._addToBuiltNodeNames(node)\n self.completed += 1\n eventNodeBuildingFinished(node, builder_output, self.getProgressStr())\n node.shrink()\n\n def failedNode(self, node, error):\n self._failed_nodes[node] = error\n eventNodeBuildingFailed(node, error)\n\n def removedNode(self, node):\n self._nodes.removeTail(node)\n self.completed += 1\n eventNodeRemoved(node, self.getProgressStr())\n node.shrink()\n\n def getProgressStr(self):\n done = self.completed + self.actual\n total = len(self._nodes) + done\n processed = done + len(self._failed_nodes)\n progress = '%s/%s' % (processed, total)\n return progress\n\n def close(self):\n self._nodes = _NodesTree()\n\n def _checkAlreadyBuilt(self, node):\n values = node.getTargetValues()\n built_targets = self._built_targets\n for value in values:\n value_sign = value.signature\n other_value_sign = built_targets.setdefault(value.valueId(),\n value_sign)\n if other_value_sign != value_sign:\n eventBuildTargetTwice(value, node)\n\n def build(self, jobs, keep_going, nodes=None, build_always=False,\n explain=False, with_backtrace=True):\n self.__reset(build_always=build_always, explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self, jobs, keep_going, with_backtrace\n ) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails and not nodes_builder.isBuilding():\n break\n nodes_builder.build(tails)\n return self.isOk()\n\n def isOk(self):\n return not bool(self._failed_nodes)\n\n def failsCount(self):\n return len(self._failed_nodes)\n\n def printFails(self):\n for node, error in self._failed_nodes.items():\n eventFailedNode(node, error)\n\n def printBuildState(self):\n logInfo('Failed nodes: %s' % len(self._failed_nodes))\n logInfo('Completed nodes: %s' % self.completed)\n logInfo('Actual nodes: %s' % self.actual)\n\n def printStatusState(self):\n logInfo('Outdated nodes: %s' % len(self._failed_nodes))\n logInfo('Actual nodes: %s' % self.actual)\n\n def clear(self, nodes=None):\n self.__reset()\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.clear(tails)\n\n def status(self, nodes=None, explain=False):\n self.__reset(explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.status(tails)\n return self.isOk()\n",
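_VFiles above lazily opens one values database per build directory and shares the handle between builders whose directories map to the same '.aql.db' path. A hedged sketch of that two-level cache follows; FakeDB stands in for aql's ValuesFile, and VFileCache is an illustrative name rather than the real class.

# Sketch of _VFiles' per-build-directory handle caching, with FakeDB
# standing in for aql's ValuesFile.

import os.path


class FakeDB(object):

    def __init__(self, path):
        self.path = path

    def close(self):
        pass


class VFileCache(object):

    def __init__(self):
        self.names = {}    # builder name -> database filename
        self.handles = {}  # database filename -> open handle

    def get(self, builder_name, build_dir):
        path = self.names.setdefault(builder_name,
                                     os.path.join(build_dir, '.aql.db'))
        try:
            return self.handles[path]
        except KeyError:
            handle = self.handles[path] = FakeDB(path)
            return handle

    def close(self):
        for handle in self.handles.values():
            handle.close()
        self.handles.clear()
        self.names.clear()


cache = VFileCache()
first = cache.get('gcc', 'build/linux')
second = cache.get('g++', 'build/linux')
print(first is second)  # True: same build dir, same database handle
cache.close()

Sharing one handle per database file is what lets concurrent builders in the same build directory see each other's saved node signatures without reopening the file.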
"step-5": "#\n# Copyright (c) 2011-2014 The developers of Aqualid project - http://aqualid.googlecode.com\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and\n# associated documentation files (the \"Software\"), to deal in the Software without restriction,\n# including without limitation the rights to use, copy, modify, merge, publish, distribute,\n# sublicense, and/or sell copies of the Software, and to permit persons to whom\n# the Software is furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in all copies or\n# substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE\n# AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n#\n\n__all__ = (\n 'BuildManager',\n 'ErrorNodeDependencyCyclic', 'ErrorNodeDependencyUnknown',\n)\n\nimport os.path\n\nfrom aql.util_types import toSequence, AqlException\nfrom aql.utils import eventStatus, eventWarning, eventError, logInfo, logError, logWarning, TaskManager\nfrom aql.values import ValuesFile\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeActual( settings, node, progress ):\n msg = \"(%s) ACTUAL: %s\" % (progress, node.getBuildStr( settings.brief ))\n logInfo( msg )\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeOutdated( settings, node, progress ):\n msg = \"(%s) OUTDATED: %s\" % (progress, node.getBuildStr( settings.brief ))\n logInfo( msg )\n\n#//===========================================================================//\n\n@eventWarning\ndef eventBuildTargetTwice( settings, value, node1 ):\n logWarning(\"Target '%s' is built twice. 
The last time built by: '%s' \" %\n ( value.name, node1.getBuildStr( settings.brief )) )\n\n#//===========================================================================//\n\n@eventError\ndef eventFailedNode( settings, node, error ):\n \n msg = node.getBuildStr( settings.brief )\n msg += '\\n\\n%s\\n' % (error,)\n \n logError( msg )\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeBuilding( settings, node ):\n pass\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeBuildingFinished( settings, node, builder_output, progress ):\n \n msg = node.getBuildStr( settings.brief )\n if settings.with_output and builder_output:\n msg += '\\n'\n if builder_output:\n msg += builder_output\n msg += '\\n'\n \n msg = \"(%s) %s\" % (progress, msg)\n \n logInfo( msg )\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeBuildingFailed( settings, node, error ):\n pass\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeRemoved( settings, node, progress ):\n msg = node.getBuildStr( settings.brief )\n if msg:\n logInfo( \"(%s) Removed: %s\" % (progress, msg) )\n\n#//===========================================================================//\n\nclass ErrorNodeDependencyCyclic( AqlException ):\n def __init__( self, node, deps ):\n msg = \"Node '%s' (%s) has a cyclic dependency: %s\" % (node, node.getBuildStr(True), deps )\n super(ErrorNodeDependencyCyclic, self).__init__( msg )\n\n#//===========================================================================//\n\nclass ErrorNodeUnknown(AqlException):\n def __init__( self, node ):\n msg = \"Unknown node '%s'\" % (node, )\n super(ErrorNodeUnknown, self).__init__( msg )\n\n#//===========================================================================//\n\nclass ErrorNodeSignatureDifferent(AqlException):\n def __init__( self, node ):\n msg = \"Two similar nodes have different signatures (sources, builder parameters or dependencies): %s\" % (node.getBuildStr( brief = False ), )\n super(ErrorNodeSignatureDifferent, self).__init__( msg )\n\n#//===========================================================================//\n\nclass ErrorNodeDependencyUnknown(AqlException):\n def __init__( self, node, dep_node ):\n msg = \"Unable to add dependency to node '%s' from node '%s'\" % (node, dep_node)\n super(ErrorNodeDependencyUnknown, self).__init__( msg )\n\n#//===========================================================================//\n\nclass InternalErrorRemoveNonTailNode( AqlException ):\n def __init__( self, node ):\n msg = \"Removing non-tail node: %s\" % (node,)\n super(InternalErrorRemoveNonTailNode, self).__init__( msg )\n\n#//===========================================================================//\n\nclass InternalErrorRemoveUnknownTailNode(AqlException):\n def __init__( self, node ):\n msg = \"Remove unknown tail node: : %s\" % (node,)\n super(InternalErrorRemoveUnknownTailNode, self).__init__( msg )\n\n#//===========================================================================//\n\nclass BuildStat (object):\n __slots__ = \\\n (\n 'total',\n 'completed',\n 'failed',\n )\n \n def __init__(self, total):\n self.total = total\n self.completed = 0\n self.failed = 0\n \n def addTotal(self, count ):\n self.total += count\n \n def incCompleted(self):\n self.completed += 1\n \n def incFailed(self):\n self.failed += 
1\n \n def getProgressStr(self):\n progress = \"%s/%s\" % (self.completed + self.failed, self.total )\n return progress\n\n#//===========================================================================//\n\nclass _NodesTree (object):\n \n __slots__ = \\\n (\n 'node2deps',\n 'dep2nodes',\n 'tail_nodes',\n )\n \n #//-------------------------------------------------------//\n \n def __init__( self ):\n self.node2deps = {}\n self.dep2nodes = {}\n self.tail_nodes = set()\n \n #//-------------------------------------------------------//\n \n def __len__(self):\n return len(self.node2deps)\n \n #//-------------------------------------------------------//\n \n def __hasCycle( self, node, new_deps ):\n \n if node in new_deps:\n return True\n \n deps = set(new_deps)\n node2deps = self.node2deps\n \n while deps:\n dep = deps.pop()\n \n dep_deps = node2deps[dep]\n \n if node in dep_deps:\n return True\n \n deps |= dep_deps\n \n return False\n \n #//-------------------------------------------------------//\n \n def __depends( self, node, deps ):\n \n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n \n try:\n current_node_deps = node2deps[ node ]\n \n deps = { dep for dep in deps if not dep.isBuilt() }\n new_deps = deps - current_node_deps\n \n if not new_deps:\n return\n \n if self.__hasCycle( node, new_deps ):\n raise ErrorNodeDependencyCyclic( node, new_deps )\n \n self.tail_nodes.discard( node )\n \n #//-------------------------------------------------------//\n \n current_node_deps.update( new_deps )\n \n #//-------------------------------------------------------//\n \n for dep in new_deps:\n dep2nodes[ dep ].add( node )\n \n except KeyError as dep_node:\n raise ErrorNodeDependencyUnknown( node, dep_node.args[0] )\n \n #//-------------------------------------------------------//\n \n def __add( self, nodes ):\n for node in nodes:\n if node not in self.node2deps:\n self.node2deps[ node ] = set()\n self.dep2nodes[ node ] = set()\n self.tail_nodes.add( node )\n\n node_srcnodes = node.getSourceNodes()\n node_depnodes = node.getDepNodes()\n\n self.__add( node_srcnodes ) # TODO: recursively add sources and depends\n self.__add( node_depnodes ) # It would be better to rewrite this code to avoid the recursion\n \n self.__depends( node, node_srcnodes )\n self.__depends( node, node_depnodes )\n \n #//-------------------------------------------------------//\n \n def add( self, nodes ):\n self.__add( toSequence( nodes ) )\n \n #//-------------------------------------------------------//\n \n def depends( self, node, deps ):\n deps = toSequence( deps )\n \n self.__add( deps )\n self.__depends( node, deps )\n \n #//-------------------------------------------------------//\n \n def removeTail( self, node ):\n node2deps = self.node2deps\n \n try:\n deps = node2deps.pop(node)\n if deps:\n raise InternalErrorRemoveNonTailNode( node )\n except KeyError as node:\n raise InternalErrorRemoveUnknownTailNode( node.args[0] )\n \n tail_nodes = self.tail_nodes\n \n # tail_nodes.remove( node )\n \n for dep in self.dep2nodes.pop( node ):\n d = node2deps[ dep ]\n d.remove( node )\n if not d:\n tail_nodes.add( dep )\n \n #//-------------------------------------------------------//\n \n def popTails( self ):\n tails = self.tail_nodes\n self.tail_nodes = set()\n return tails\n \n #//-------------------------------------------------------//\n \n def __getAllNodes(self, nodes ):\n nodes = set(toSequence(nodes))\n all_nodes = set( nodes )\n \n node2deps = self.node2deps\n while nodes:\n node = nodes.pop()\n \n try:\n deps = 
node2deps[ node ] - all_nodes\n except KeyError as node:\n raise ErrorNodeUnknown( node.args[0] )\n \n all_nodes.update( deps )\n nodes.update( deps )\n \n return all_nodes\n \n #//-------------------------------------------------------//\n \n def shrinkTo(self, nodes ):\n \n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n \n ignore_nodes = set(node2deps) - self.__getAllNodes( nodes )\n \n self.tail_nodes -= ignore_nodes\n \n for node in ignore_nodes:\n del node2deps[ node ]\n del dep2nodes[ node ]\n \n for dep_nodes in dep2nodes.values():\n dep_nodes.difference_update( ignore_nodes ) \n \n #//-------------------------------------------------------//\n \n def selfTest( self ):\n if set(self.node2deps) != set(self.dep2nodes):\n raise AssertionError(\"Not all deps are added\")\n \n all_dep_nodes = set()\n \n for node in self.dep2nodes:\n if node not in self.node2deps:\n raise AssertionError(\"Missed node: %s\" % (node,) )\n \n node_deps = self.node2deps[node]\n \n if not node_deps:\n if node not in self.tail_nodes:\n raise AssertionError(\"Missed tail node: %s, tail_nodes: %s\" % (node, self.tail_nodes) )\n else:\n if node in self.tail_nodes:\n raise AssertionError(\"Invalid tail node: %s\" % (node,) )\n \n all_dep_nodes |= node_deps\n \n for dep in node_deps:\n if node not in self.dep2nodes[dep]:\n raise AssertionError(\"node not in self.dep2nodes[dep]: dep: %s, node: %s\" % (dep, node) )\n \n if all_dep_nodes - set(self.dep2nodes):\n raise AssertionError(\"Not all deps are added\")\n\n#//===========================================================================//\n\nclass _VFiles( object ):\n __slots__ = \\\n (\n 'names',\n 'handles',\n )\n \n #//-------------------------------------------------------//\n \n def __init__( self ):\n self.handles = {}\n self.names = {}\n \n #//-------------------------------------------------------//\n \n def __iter__(self):\n raise TypeError()\n \n #//-------------------------------------------------------//\n \n def __getitem__( self, builder ):\n \n builder_name = builder.name\n \n try:\n vfilename = self.names[ builder_name ]\n except KeyError:\n vfilename = os.path.join( builder.getBuildDir(), '.aql.db' )\n self.names[ builder_name ] = vfilename\n \n try:\n return self.handles[ vfilename ]\n \n except KeyError:\n vfile = ValuesFile( vfilename )\n self.handles[ vfilename ] = vfile\n \n return vfile\n\n #//-------------------------------------------------------//\n \n def close(self):\n for vfile in self.handles.values():\n vfile.close()\n \n self.handles.clear()\n self.names.clear()\n \n #//-------------------------------------------------------//\n \n def __enter__(self):\n return self\n \n #//-------------------------------------------------------//\n \n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n#//===========================================================================//\n\ndef _buildNode( node ):\n \n eventNodeBuilding( node )\n \n out = node.build()\n \n if out:\n try:\n out = out.strip()\n except Exception:\n pass\n \n return out\n\n#//===========================================================================//\n\nclass _NodeState( object ):\n __slots__ = \\\n (\n 'initialized',\n 'check_depends',\n 'check_replace',\n 'check_split',\n 'check_actual',\n 'split_nodes',\n )\n \n def __init__(self ):\n self.initialized = False\n self.check_depends = True\n self.check_replace = True\n self.check_split = True\n self.check_actual = True\n self.split_nodes = None\n \n def __str__(self):\n return \"initialized: %s, 
check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s\" %\\\n (self.initialized, self.check_depends, self.check_replace, self.check_split, self.check_actual, self.split_nodes )\n \n#//===========================================================================//\n\n# noinspection PyAttributeOutsideInit\nclass _NodesBuilder (object):\n \n __slots__ = \\\n (\n 'vfiles',\n 'build_manager',\n 'task_manager',\n 'node_states',\n 'building_nodes',\n )\n \n #//-------------------------------------------------------//\n \n def __init__( self, build_manager, jobs = 0, keep_going = False, with_backtrace = True ):\n self.vfiles = _VFiles()\n self.node_states = {}\n self.building_nodes = {}\n self.build_manager = build_manager\n self.task_manager = TaskManager( num_threads = jobs, stop_on_fail = not keep_going, with_backtrace = with_backtrace )\n \n #//-------------------------------------------------------//\n \n def __enter__(self):\n return self\n \n #//-------------------------------------------------------//\n \n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n \n #//-------------------------------------------------------//\n \n def _getNodeState( self, node ):\n try:\n state = self.node_states[ node ]\n except KeyError:\n state = _NodeState()\n self.node_states[ node ] = state\n \n return state\n \n #//-------------------------------------------------------//\n \n def _removeNodeState( self, node ):\n try:\n del self.node_states[ node ]\n except KeyError:\n pass\n \n #//-------------------------------------------------------//\n \n def _addBuildingNode( self, node, state ):\n conflicting_nodes = []\n building_nodes = self.building_nodes\n \n for name, signature in node.getNamesAndSignatures():\n node_signature = (node, signature)\n \n other_node, other_signature = building_nodes.setdefault( name, node_signature )\n if other_node is not node:\n if other_signature != signature:\n raise ErrorNodeSignatureDifferent( node )\n \n conflicting_nodes.append( other_node )\n \n if conflicting_nodes:\n state.check_actual = True\n self.build_manager.depends( node, conflicting_nodes )\n return True\n \n return False\n \n #//-------------------------------------------------------//\n \n def _removeBuildingNode( self, node ):\n building_nodes = self.building_nodes\n for name in node.getNames():\n del building_nodes[ name ]\n \n #//-------------------------------------------------------//\n \n def isBuilding(self):\n return bool(self.building_nodes)\n \n #//-------------------------------------------------------//\n \n def _checkPrebuildDepends( self, node ):\n dep_nodes = node.buildDepends()\n if dep_nodes:\n self.build_manager.depends( node, dep_nodes )\n return True\n \n return False\n \n #//-------------------------------------------------------//\n \n def _checkPrebuildReplace( self, node ):\n \n if node.buildReplace():\n new_node_sources = node.getSourceNodes()\n if new_node_sources:\n self.build_manager.depends( node, new_node_sources )\n return True\n \n return False\n \n #//-------------------------------------------------------//\n \n def _checkPrebuildSplit( self, node, state ):\n \n build_manager = self.build_manager\n \n if state.check_split:\n state.check_split = False\n \n check_actual = True\n \n if node.isBatch() and state.check_actual:\n # Check for changed sources of BatchNode\n vfile = self.vfiles[ node.builder ]\n actual = build_manager.isActualNode( node, vfile )\n \n if actual:\n self._removeNodeState( node )\n build_manager.actualNode( 
node )\n return True\n \n check_actual = False\n \n split_nodes = node.buildSplit()\n if split_nodes:\n state.split_nodes = split_nodes\n for split_node in split_nodes:\n split_state = self._getNodeState( split_node )\n split_state.check_split = False\n split_state.check_depends = False\n split_state.check_replace = False\n split_state.check_actual = check_actual\n split_state.initialized = split_node.builder is node.builder\n \n self.build_manager.depends( node, split_nodes )\n return True\n \n elif state.split_nodes is not None:\n if node.isBatch():\n node._populateTargets()\n else:\n targets = []\n for split_node in state.split_nodes:\n targets += split_node.getTargetValues()\n \n node.target_values = targets\n \n self._removeNodeState( node )\n \n self.build_manager.completedSplitNode( node )\n \n return True\n \n return False\n \n #//-------------------------------------------------------//\n \n def _prebuild( self, node, state ):\n \n # print( \"node: %s, state: %s\" % (node, state))\n \n if not state.initialized:\n node.initiate()\n state.initialized = True\n \n if state.check_depends:\n state.check_depends = False\n if self._checkPrebuildDepends( node ):\n return True\n \n if state.check_replace:\n state.check_replace = False\n if self._checkPrebuildReplace( node ):\n return True\n \n if self._checkPrebuildSplit( node, state ):\n return True\n \n return False\n \n #//-------------------------------------------------------//\n \n def build( self, nodes ):\n \n build_manager = self.build_manager\n \n vfiles = self.vfiles\n addTask = self.task_manager.addTask\n \n tasks_check_period = 10\n added_tasks = 0\n changed = False\n \n for node in nodes:\n \n node_state = self._getNodeState( node )\n \n if self._prebuild( node, node_state ):\n changed = True\n continue\n \n if self._addBuildingNode( node, node_state ):\n continue\n \n if node_state.check_actual:\n vfile = vfiles[ node.builder ]\n actual = build_manager.isActualNode( node, vfile )\n \n if actual:\n self._removeNodeState( node )\n self._removeBuildingNode( node )\n build_manager.actualNode( node )\n changed = True\n continue\n \n addTask( node, _buildNode, node )\n \n added_tasks += 1\n \n if added_tasks == tasks_check_period:\n changed = self._getFinishedNodes( block = False ) or changed\n added_tasks = 0\n \n self._getFinishedNodes( block = not changed )\n \n #//-------------------------------------------------------//\n \n def _getFinishedNodes( self, block = True ):\n # print(\"tasks: %s, finished_tasks: %s\" % (self.task_manager.unfinished_tasks, self.task_manager.finished_tasks.qsize()))\n finished_tasks = self.task_manager.finishedTasks( block = block )\n \n vfiles = self.vfiles\n \n build_manager = self.build_manager\n \n for task in finished_tasks:\n node = task.task_id\n error = task.error\n \n self._removeNodeState( node )\n self._removeBuildingNode( node )\n \n vfile = vfiles[ node.builder ]\n \n if error is None:\n node.save( vfile )\n build_manager.completedNode( node, task.result )\n else:\n if node.isBatch():\n node.save( vfile )\n \n build_manager.failedNode( node, error )\n\n return bool(finished_tasks)\n \n #//-------------------------------------------------------//\n \n def clear( self, nodes ):\n \n vfiles = self.vfiles\n build_manager = self.build_manager\n \n for node in nodes:\n \n node_state = self._getNodeState( node )\n \n node_state.check_actual = False\n \n if self._prebuild( node, node_state ):\n continue\n \n vfile = vfiles[ node.builder ]\n node.clear( vfile )\n build_manager.removedNode( node 
)\n \n #//-------------------------------------------------------//\n \n def status( self, nodes ):\n \n vfiles = self.vfiles\n build_manager = self.build_manager\n \n for node in nodes:\n \n node_state = self._getNodeState( node )\n node_state.check_actual = False\n \n if self._prebuild( node, node_state ):\n continue\n \n vfile = vfiles[ node.builder ]\n if build_manager.isActualNode( node, vfile ):\n build_manager.actualNodeStatus( node )\n else:\n build_manager.outdatedNodeStatus( node )\n \n #//-------------------------------------------------------//\n \n def close( self ):\n try:\n self.task_manager.stop()\n self._getFinishedNodes( block = False )\n finally:\n self.vfiles.close()\n\n#//===========================================================================//\n\nclass BuildManager (object):\n \n __slots__ = \\\n (\n '_nodes',\n '_built_targets',\n '_failed_nodes',\n '_built_node_names',\n 'completed',\n 'actual',\n 'explain',\n )\n \n #//-------------------------------------------------------//\n \n def __init__(self):\n self._nodes = _NodesTree()\n self.__reset()\n \n #//-------------------------------------------------------//\n \n def __reset(self, build_always = False, explain = False ):\n \n self._built_targets = {}\n self._failed_nodes = {}\n self._built_node_names = set() if build_always else None\n \n self.completed = 0\n self.actual = 0\n self.explain = explain\n \n #//-------------------------------------------------------//\n \n def add( self, nodes ):\n self._nodes.add( nodes )\n \n #//-------------------------------------------------------//\n \n def depends( self, node, deps ):\n self._nodes.depends( node, deps )\n \n #//-------------------------------------------------------//\n \n def __len__(self):\n return len(self._nodes)\n \n #//-------------------------------------------------------//\n \n def selfTest( self ):\n self._nodes.selfTest()\n \n #//-------------------------------------------------------//\n \n def getTailNodes(self):\n return self._nodes.popTails()\n \n #//-------------------------------------------------------//\n \n def actualNodeStatus( self, node ):\n eventNodeActual( node, self.getProgressStr() )\n self.actualNode( node )\n \n #//-------------------------------------------------------//\n \n def outdatedNodeStatus( self, node ):\n self._failed_nodes[ node ] = None\n \n eventNodeOutdated( node, self.getProgressStr() )\n node.shrink()\n \n #//-------------------------------------------------------//\n \n def isActualNode( self, node, vfile ):\n return node.checkActual( vfile, self._built_node_names, self.explain )\n \n #//-------------------------------------------------------//\n \n def _addToBuiltNodeNames(self, node ):\n built_names = self._built_node_names\n if built_names is not None:\n built_names.update( node.getNames() )\n \n #//-------------------------------------------------------//\n \n def completedSplitNode(self, node ):\n self._nodes.removeTail( node )\n node.shrink()\n \n #//-------------------------------------------------------//\n \n def actualNode( self, node ):\n self._nodes.removeTail( node )\n self.actual += 1\n \n node.shrink()\n \n #//-------------------------------------------------------//\n \n def completedNode( self, node, builder_output ):\n self._checkAlreadyBuilt( node )\n self._nodes.removeTail( node )\n self._addToBuiltNodeNames( node )\n \n self.completed += 1\n \n eventNodeBuildingFinished( node, builder_output, self.getProgressStr() )\n \n node.shrink()\n \n 
#//-------------------------------------------------------//\n \n def failedNode( self, node, error ):\n self._failed_nodes[ node ] = error\n \n eventNodeBuildingFailed( node, error )\n \n #//-------------------------------------------------------//\n \n def removedNode( self, node ):\n self._nodes.removeTail( node )\n self.completed += 1\n \n eventNodeRemoved( node, self.getProgressStr() )\n \n node.shrink()\n \n #//-------------------------------------------------------//\n \n def getProgressStr(self):\n done = self.completed + self.actual\n total = len(self._nodes) + done\n \n processed = done + len(self._failed_nodes)\n \n progress = \"%s/%s\" % (processed, total)\n return progress\n \n #//-------------------------------------------------------//\n \n def close( self ):\n self._nodes = _NodesTree()\n \n #//-------------------------------------------------------//\n \n def _checkAlreadyBuilt( self, node ):\n values = node.getTargetValues()\n \n built_targets = self._built_targets\n \n for value in values:\n value_sign = value.signature\n other_value_sign = built_targets.setdefault( value.valueId(), value_sign )\n \n if other_value_sign != value_sign:\n eventBuildTargetTwice( value, node )\n \n #//-------------------------------------------------------//\n \n def build( self, jobs, keep_going, nodes = None, build_always = False, explain = False, with_backtrace = True ):\n \n self.__reset( build_always = build_always, explain = explain )\n \n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo( nodes )\n \n with _NodesBuilder( self, jobs, keep_going, with_backtrace ) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n \n if not tails and not nodes_builder.isBuilding():\n break\n \n nodes_builder.build( tails )\n \n return self.isOk()\n \n #//-------------------------------------------------------//\n \n def isOk(self):\n return not bool( self._failed_nodes )\n \n #//-------------------------------------------------------//\n \n def failsCount(self):\n return len( self._failed_nodes )\n \n #//-------------------------------------------------------//\n \n def printFails(self ):\n for node, error in self._failed_nodes.items():\n eventFailedNode( node, error )\n \n #//-------------------------------------------------------//\n \n def printBuildState(self):\n logInfo(\"Failed nodes: %s\" % len(self._failed_nodes) )\n logInfo(\"Completed nodes: %s\" % self.completed )\n logInfo(\"Actual nodes: %s\" % self.actual )\n \n #//-------------------------------------------------------//\n \n def printStatusState(self):\n logInfo(\"Outdated nodes: %s\" % len(self._failed_nodes) )\n logInfo(\"Actual nodes: %s\" % self.actual )\n \n #//-------------------------------------------------------//\n \n def clear( self, nodes = None ):\n \n self.__reset()\n \n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo( nodes )\n \n with _NodesBuilder( self ) as nodes_builder:\n while True:\n \n tails = self.getTailNodes()\n \n if not tails:\n break\n \n nodes_builder.clear( tails )\n \n #//-------------------------------------------------------//\n \n def status( self, nodes = None, explain = False ):\n \n self.__reset( explain = explain )\n \n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo( nodes )\n \n with _NodesBuilder( self ) as nodes_builder:\n \n while True:\n tails = self.getTailNodes()\n \n if not tails:\n break\n \n nodes_builder.status( tails )\n \n return self.isOk()\n",
"step-ids": [
68,
84,
88,
95,
105
]
}
|
[
68,
84,
88,
95,
105
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def mainLoop():
wilfredCommunication = command.Command()
wilfredCommunication.waitForClient()
wilfredCommand = command.Command()
while True:
if not wilfredCommunication.checkConnection():
wilfredCommunication.waitForClient()
commands = wilfredCommunication.getCommand()
for commandData in commands.split('\n'):
cmd = commandData.split(' ')[0].strip()
if cmd == '':
continue
args = [arg.strip() for arg in commandData.split(' ')[1:]]
if cmd == 'setMotorSpeed':
motorNum = int(args[0])
motorSpeed = int(args[1])
wilfredCommand.setMotorSpeed(motorNum, motorSpeed)
elif cmd == 'playMeow':
goodMessage('wilfred: playing meow from file: ', args[0])
wilfredCommand.playMeow(args[0])
elif cmd == 'getAccel':
goodMessage('wilfred: returning acceleration...')
wilfredCommunication.sendMessage('(0, 0, 0)')
else:
errorMessage('wilfred: command not recognized: ', cmd, ': ',
args)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def mainLoop():
wilfredCommunication = command.Command()
wilfredCommunication.waitForClient()
wilfredCommand = command.Command()
while True:
if not wilfredCommunication.checkConnection():
wilfredCommunication.waitForClient()
commands = wilfredCommunication.getCommand()
for commandData in commands.split('\n'):
cmd = commandData.split(' ')[0].strip()
if cmd == '':
continue
args = [arg.strip() for arg in commandData.split(' ')[1:]]
if cmd == 'setMotorSpeed':
motorNum = int(args[0])
motorSpeed = int(args[1])
wilfredCommand.setMotorSpeed(motorNum, motorSpeed)
elif cmd == 'playMeow':
goodMessage('wilfred: playing meow from file: ', args[0])
wilfredCommand.playMeow(args[0])
elif cmd == 'getAccel':
goodMessage('wilfred: returning acceleration...')
wilfredCommunication.sendMessage('(0, 0, 0)')
else:
errorMessage('wilfred: command not recognized: ', cmd, ': ',
args)
if __name__ == '__main__':
mainLoop()
<|reserved_special_token_1|>
import command
import driver
from debug import *
def mainLoop():
wilfredCommunication = command.Command()
wilfredCommunication.waitForClient()
wilfredCommand = command.Command()
while True:
if not wilfredCommunication.checkConnection():
wilfredCommunication.waitForClient()
commands = wilfredCommunication.getCommand()
for commandData in commands.split('\n'):
cmd = commandData.split(' ')[0].strip()
if cmd == '':
continue
args = [arg.strip() for arg in commandData.split(' ')[1:]]
if cmd == 'setMotorSpeed':
motorNum = int(args[0])
motorSpeed = int(args[1])
wilfredCommand.setMotorSpeed(motorNum, motorSpeed)
elif cmd == 'playMeow':
goodMessage('wilfred: playing meow from file: ', args[0])
wilfredCommand.playMeow(args[0])
elif cmd == 'getAccel':
goodMessage('wilfred: returning acceleration...')
wilfredCommunication.sendMessage('(0, 0, 0)')
else:
errorMessage('wilfred: command not recognized: ', cmd, ': ',
args)
if __name__ == '__main__':
mainLoop()
<|reserved_special_token_1|>
# wilfred.py
# Authors
# Stuart C. Larsen (SCL)
# Daryl W. Bennet (DWB)
# Set up three main modules (command, control, reconnaissance),
# and then enter the main event loop.
#
# Command:
# Gather mission priorities and objectives, such as turn left, turn right,
# goto GPS 45, 65, land, take off.
#
# Control:
# Fly the craft to complete the command objective.
#
# Reconnaissance:
# Gather information about wilfreds current position.
#
# Main Event Loop:
# Check command listing for new updates, check reconnaissance for current
# position, and then control the craft to the correct zone. Main loop will
# be a very fast feedback loop.
import command
import driver
from debug import *
def mainLoop():
wilfredCommunication = command.Command()
wilfredCommunication.waitForClient()
wilfredCommand = command.Command()
while True:
if not wilfredCommunication.checkConnection():
wilfredCommunication.waitForClient()
commands = wilfredCommunication.getCommand()
for commandData in commands.split('\n'):
cmd = commandData.split(' ')[0].strip()
            if cmd == "":
                continue
args = [arg.strip() for arg in commandData.split(' ')[1:]]
# setMotorSpeed (0-3) (0-100)
if cmd == "setMotorSpeed":
motorNum = int(args[0])
motorSpeed = int(args[1])
wilfredCommand.setMotorSpeed(motorNum, motorSpeed)
elif cmd == "playMeow":
goodMessage("wilfred: playing meow from file: ", args[0])
wilfredCommand.playMeow(args[0])
elif cmd == "getAccel":
goodMessage("wilfred: returning acceleration...")
wilfredCommunication.sendMessage("(0, 0, 0)")
else:
errorMessage("wilfred: command not recognized: ", cmd, ": ", args)
if __name__ == "__main__":
mainLoop()
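
The parsing loop above implies a simple wire protocol: commands arrive as
newline-delimited lines, each a command name followed by space-separated
arguments. A minimal client sketch follows; the raw TCP socket, host, port and
meow.wav filename are assumptions (command.Command's transport is not shown in
this file), not the project's actual client API.

import socket

# Hypothetical client exercising the protocol parsed by mainLoop() above.
client = socket.create_connection(("localhost", 9999))  # assumed host/port
client.sendall(b"setMotorSpeed 0 50\n")   # motor number (0-3), speed (0-100)
client.sendall(b"playMeow meow.wav\n")    # hypothetical audio file path
client.sendall(b"getAccel\n")             # server replies with "(0, 0, 0)"
print(client.recv(1024).decode())
client.close()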
|
flexible
|
{
"blob_id": "a77fb90cdc6e7f9b70f9feeefc2b7f8e93a2d8c5",
"index": 9875,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef mainLoop():\n wilfredCommunication = command.Command()\n wilfredCommunication.waitForClient()\n wilfredCommand = command.Command()\n while True:\n if not wilfredCommunication.checkConnection():\n wilfredCommunication.waitForClient()\n commands = wilfredCommunication.getCommand()\n for commandData in commands.split('\\n'):\n cmd = commandData.split(' ')[0].strip()\n if cmd == '':\n continue\n args = [arg.strip() for arg in commandData.split(' ')[1:]]\n if cmd == 'setMotorSpeed':\n motorNum = int(args[0])\n motorSpeed = int(args[1])\n wilfredCommand.setMotorSpeed(motorNum, motorSpeed)\n elif cmd == 'playMeow':\n goodMessage('wilfred: playing meow from file: ', args[0])\n wilfredCommand.playMeow(args[0])\n elif cmd == 'getAccel':\n goodMessage('wilfred: returning acceleration...')\n wilfredCommunication.sendMessage('(0, 0, 0)')\n else:\n errorMessage('wilfred: command not recognized: ', cmd, ': ',\n args)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef mainLoop():\n wilfredCommunication = command.Command()\n wilfredCommunication.waitForClient()\n wilfredCommand = command.Command()\n while True:\n if not wilfredCommunication.checkConnection():\n wilfredCommunication.waitForClient()\n commands = wilfredCommunication.getCommand()\n for commandData in commands.split('\\n'):\n cmd = commandData.split(' ')[0].strip()\n if cmd == '':\n continue\n args = [arg.strip() for arg in commandData.split(' ')[1:]]\n if cmd == 'setMotorSpeed':\n motorNum = int(args[0])\n motorSpeed = int(args[1])\n wilfredCommand.setMotorSpeed(motorNum, motorSpeed)\n elif cmd == 'playMeow':\n goodMessage('wilfred: playing meow from file: ', args[0])\n wilfredCommand.playMeow(args[0])\n elif cmd == 'getAccel':\n goodMessage('wilfred: returning acceleration...')\n wilfredCommunication.sendMessage('(0, 0, 0)')\n else:\n errorMessage('wilfred: command not recognized: ', cmd, ': ',\n args)\n\n\nif __name__ == '__main__':\n mainLoop()\n",
"step-4": "import command\nimport driver\nfrom debug import *\n\n\ndef mainLoop():\n wilfredCommunication = command.Command()\n wilfredCommunication.waitForClient()\n wilfredCommand = command.Command()\n while True:\n if not wilfredCommunication.checkConnection():\n wilfredCommunication.waitForClient()\n commands = wilfredCommunication.getCommand()\n for commandData in commands.split('\\n'):\n cmd = commandData.split(' ')[0].strip()\n if cmd == '':\n continue\n args = [arg.strip() for arg in commandData.split(' ')[1:]]\n if cmd == 'setMotorSpeed':\n motorNum = int(args[0])\n motorSpeed = int(args[1])\n wilfredCommand.setMotorSpeed(motorNum, motorSpeed)\n elif cmd == 'playMeow':\n goodMessage('wilfred: playing meow from file: ', args[0])\n wilfredCommand.playMeow(args[0])\n elif cmd == 'getAccel':\n goodMessage('wilfred: returning acceleration...')\n wilfredCommunication.sendMessage('(0, 0, 0)')\n else:\n errorMessage('wilfred: command not recognized: ', cmd, ': ',\n args)\n\n\nif __name__ == '__main__':\n mainLoop()\n",
"step-5": "# wilfred.py\n# Authors\n# Stuart C. Larsen (SCL)\n# Daryl W. Bennet (DWB)\n\n# Set up three main modules (command, control, reconnaissance),\n# and then enter main event loop.\n#\n# Command:\n# Gather mission priorities and objectives, such as turn left, turn right\n# goto GPS 45, 65, land, take off.\n#\n# Control:\n# Fly the craft to complete the command objective.\n#\n# Reconnaissance:\n# Gather information about wilfreds current position.\n#\n# Main Event Loop:\n# Check command listing for new updates, check reconnaisannce for current\n# posistion, and then control the craft to the correct zone. Main loop will\n# be a very fast feedback loop.\n\nimport command\nimport driver\nfrom debug import *\n\ndef mainLoop():\n wilfredCommunication = command.Command()\n wilfredCommunication.waitForClient()\n\n wilfredCommand = command.Command()\n\n while True:\n if not wilfredCommunication.checkConnection():\n wilfredCommunication.waitForClient()\n commands = wilfredCommunication.getCommand()\n \n\n for commandData in commands.split('\\n'):\n cmd = commandData.split(' ')[0].strip()\n if cmd == \"\": continue\n args = [arg.strip() for arg in commandData.split(' ')[1:]]\n \n \n # setMotorSpeed (0-3) (0-100)\n if cmd == \"setMotorSpeed\":\n motorNum = int(args[0])\n motorSpeed = int(args[1])\n wilfredCommand.setMotorSpeed(motorNum, motorSpeed)\n elif cmd == \"playMeow\":\n goodMessage(\"wilfred: playing meow from file: \", args[0])\n wilfredCommand.playMeow(args[0])\n elif cmd == \"getAccel\":\n goodMessage(\"wilfred: returning acceleration...\")\n wilfredCommunication.sendMessage(\"(0, 0, 0)\")\n else:\n errorMessage(\"wilfred: command not recognized: \", cmd, \": \", args)\n \n\nif __name__ == \"__main__\":\n mainLoop()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def run_explanations(network, explanation_module, data_iterator):
network.eval()
best_accuracy = 0
worst_accuracy = 0
best_correct = 0
worst_correct = 0
covered = 0
total = 0
pbar = tqdm()
pbar.reset(total=len(data_iterator))
for _, data in enumerate(data_iterator):
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1
story = torch.cat((p1, p2, p3, p4), 1)
background = [p1, p2, p3, p4]
answers = [a1, a2]
total += y.size(0)
with torch.no_grad():
outcome, rh, wh = network(story, [a1, a2])
predicted = torch.argmax(outcome, 1)
for index_elem in range(p1.shape[0]):
elem_background = [p1[index_elem:index_elem + 1, :], p2[
index_elem:index_elem + 1, :], p3[index_elem:index_elem +
1, :], p4[index_elem:index_elem + 1, :]]
elem_answers = [a1[index_elem:index_elem + 1, :], a2[
index_elem:index_elem + 1, :]]
elem_predicted = predicted[index_elem]
sgt = explanation_module.get_sgt(network, elem_background,
elem_answers)
if len(set(sgt)) > 1:
covered += 1
rank, _ = explanation_module.get_rank(elem_background,
wh[0][0], rh[elem_predicted.item() + 1][0])
best_prediction = sgt[rank[0] - 1]
best_correct += (elem_predicted == best_prediction).sum(
).item()
worst_prediction = sgt[rank[-1] - 1]
worst_correct += (elem_predicted == worst_prediction).sum(
).item()
best_accuracy = float(best_correct / covered
) if best_correct > 0 else 0
worst_accuracy = float(worst_correct / covered
) if worst_correct > 0 else 0
pbar.set_postfix({'Best': best_accuracy, 'Worst': worst_accuracy,
'cov': covered / total})
pbar.update()
pbar.close()
return best_accuracy, worst_accuracy
<|reserved_special_token_0|>
def run_val_epoch(network, data_iterator):
network.eval()
accuracy = 0
correct = 0
total = 0
pbar = tqdm()
pbar.reset(total=len(data_iterator))
with torch.no_grad():
for _, data in enumerate(data_iterator):
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1
story = torch.cat((p1, p2, p3, p4), 1)
outcome, _, _ = network(story, [a1, a2])
predicted = torch.argmax(outcome, 1)
correct += (predicted == y).sum().item()
total += y.size(0)
accuracy = float(correct / total) if correct > 0 else 0
pbar.set_postfix({'Acc': accuracy})
pbar.update()
pbar.close()
return accuracy
<|reserved_special_token_0|>
def main(argv):
path_model = FLAGS.path_model
path_training = FLAGS.path_training
path_val = FLAGS.path_val
top_k = FLAGS.top_k
use_surr = FLAGS.use_surrogate
run_training(path_training, path_val, path_model, top_k, use_surr)
print('Training process ended! The new model is stored on {}.'.format(
path_model))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def run_explanations(network, explanation_module, data_iterator):
network.eval()
best_accuracy = 0
worst_accuracy = 0
best_correct = 0
worst_correct = 0
covered = 0
total = 0
pbar = tqdm()
pbar.reset(total=len(data_iterator))
for _, data in enumerate(data_iterator):
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1
story = torch.cat((p1, p2, p3, p4), 1)
background = [p1, p2, p3, p4]
answers = [a1, a2]
total += y.size(0)
with torch.no_grad():
outcome, rh, wh = network(story, [a1, a2])
predicted = torch.argmax(outcome, 1)
for index_elem in range(p1.shape[0]):
elem_background = [p1[index_elem:index_elem + 1, :], p2[
index_elem:index_elem + 1, :], p3[index_elem:index_elem +
1, :], p4[index_elem:index_elem + 1, :]]
elem_answers = [a1[index_elem:index_elem + 1, :], a2[
index_elem:index_elem + 1, :]]
elem_predicted = predicted[index_elem]
sgt = explanation_module.get_sgt(network, elem_background,
elem_answers)
if len(set(sgt)) > 1:
covered += 1
rank, _ = explanation_module.get_rank(elem_background,
wh[0][0], rh[elem_predicted.item() + 1][0])
best_prediction = sgt[rank[0] - 1]
best_correct += (elem_predicted == best_prediction).sum(
).item()
worst_prediction = sgt[rank[-1] - 1]
worst_correct += (elem_predicted == worst_prediction).sum(
).item()
best_accuracy = float(best_correct / covered
) if best_correct > 0 else 0
worst_accuracy = float(worst_correct / covered
) if worst_correct > 0 else 0
pbar.set_postfix({'Best': best_accuracy, 'Worst': worst_accuracy,
'cov': covered / total})
pbar.update()
pbar.close()
return best_accuracy, worst_accuracy
def run_training_epoch(network, data_iterator, loss_function, optimizer,
max_grad_norm):
network.train()
accuracy = 0
correct = 0
total = 0
losses = []
pbar = tqdm()
pbar.reset(total=len(data_iterator))
for _, data in enumerate(data_iterator):
optimizer.zero_grad()
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1
story = torch.cat((p1, p2, p3, p4), 1)
outcome, _, _ = network(story, [a1, a2])
predicted = torch.argmax(outcome, 1)
loss = loss_function(outcome, y)
loss.backward()
losses.append(loss.item())
correct += (predicted == y).sum().item()
total += y.size(0)
accuracy = float(correct / total) if correct > 0 else 0
nn.utils.clip_grad_norm_(network.parameters(), max_norm=max_grad_norm)
optimizer.step()
pbar.set_postfix({'Acc': accuracy})
pbar.update()
pbar.close()
return accuracy, np.mean(losses)
def run_val_epoch(network, data_iterator):
network.eval()
accuracy = 0
correct = 0
total = 0
pbar = tqdm()
pbar.reset(total=len(data_iterator))
with torch.no_grad():
for _, data in enumerate(data_iterator):
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1
story = torch.cat((p1, p2, p3, p4), 1)
outcome, _, _ = network(story, [a1, a2])
predicted = torch.argmax(outcome, 1)
correct += (predicted == y).sum().item()
total += y.size(0)
accuracy = float(correct / total) if correct > 0 else 0
pbar.set_postfix({'Acc': accuracy})
pbar.update()
pbar.close()
return accuracy
def run_training(path_training, path_val, path_model, top_k,
required_explanation):
config_dict = yaml.safe_load(open('config.yaml', 'r'))
controller_config = ControllerConfig(**config_dict['controller'])
memory_config = MemoryConfig(**config_dict['memory'])
training_parameters = TrainingConfig(**config_dict['training'])
DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
train_dataset = functions.get_cloze_dataset(path_training)
val_dataset = functions.get_cloze_dataset(path_val)
train_iterator = torchtext.data.Iterator(train_dataset, batch_size=
training_parameters.batch_size, train=True, shuffle=True, device=DEVICE
)
val_iterator = torchtext.data.Iterator(val_dataset, batch_size=
training_parameters.batch_size, train=False, sort=False, device=DEVICE)
vocab = torch.load('dataset/vocab')['vocab']
embedding_pretrained_weights = vocab.vectors
pre_trained_embeddings = torch.as_tensor(embedding_pretrained_weights).to(
DEVICE)
padding_index = 1
embedding_dim = len(embedding_pretrained_weights[0])
network = model.ClozeModel(controller_config, memory_config,
embedding_dim, len(pre_trained_embeddings), dropout=
training_parameters.dropout).to(DEVICE)
network.embeddings.weight.data.copy_(pre_trained_embeddings)
network.embeddings.weight.requires_grad = True
explanation_mod = core.dnc.explanation.ExplanationModule(padding_value=
padding_index, top_k=top_k)
loss_function = nn.CrossEntropyLoss()
optimizer = optim.Adam(network.parameters(), lr=training_parameters.
learning_rate, eps=1e-07)
top1_acc = 0.0
for epoch in range(1, 11):
print('Running epoch {}'.format(epoch))
_, _ = run_training_epoch(network, train_iterator, loss_function,
optimizer, training_parameters.max_grad_norm)
print('Validation epoch {}'.format(epoch))
accuracy = run_val_epoch(network, val_iterator)
if required_explanation:
print('Explaining training dataset')
run_explanations(network, explanation_mod, train_iterator)
print('Explain validation dataset')
run_explanations(network, explanation_mod, val_iterator)
if accuracy > top1_acc:
top1_acc = accuracy
print('saving model...')
checkpoint = {'controller_config': config_dict['controller'],
'memory_config': config_dict['memory'], 'state_dict':
network.state_dict(), 'len_embeddings': len(
pre_trained_embeddings)}
torch.save(checkpoint, path_model)
def main(argv):
path_model = FLAGS.path_model
path_training = FLAGS.path_training
path_val = FLAGS.path_val
top_k = FLAGS.top_k
use_surr = FLAGS.use_surrogate
run_training(path_training, path_val, path_model, top_k, use_surr)
print('Training process ended! The new model is stored on {}.'.format(
path_model))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
absl.flags.DEFINE_string('path_model', None, 'Path of the trained model')
absl.flags.DEFINE_string('path_training', None,
    'Path where the training csv dataset is stored')
absl.flags.DEFINE_string('path_val', None,
    'Path where the validation csv dataset is stored')
absl.flags.DEFINE_integer('top_k', 25,
    'Number of read cells considered for each step')
absl.flags.DEFINE_boolean('use_surrogate', False,
    'Whether to extract surrogate ground truth for explanation')
absl.flags.mark_flag_as_required('path_model')
absl.flags.mark_flag_as_required('path_training')
absl.flags.mark_flag_as_required('path_val')
def run_explanations(network, explanation_module, data_iterator):
network.eval()
best_accuracy = 0
worst_accuracy = 0
best_correct = 0
worst_correct = 0
covered = 0
total = 0
pbar = tqdm()
pbar.reset(total=len(data_iterator))
for _, data in enumerate(data_iterator):
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1
story = torch.cat((p1, p2, p3, p4), 1)
background = [p1, p2, p3, p4]
answers = [a1, a2]
total += y.size(0)
with torch.no_grad():
outcome, rh, wh = network(story, [a1, a2])
predicted = torch.argmax(outcome, 1)
for index_elem in range(p1.shape[0]):
elem_background = [p1[index_elem:index_elem + 1, :], p2[
index_elem:index_elem + 1, :], p3[index_elem:index_elem +
1, :], p4[index_elem:index_elem + 1, :]]
elem_answers = [a1[index_elem:index_elem + 1, :], a2[
index_elem:index_elem + 1, :]]
elem_predicted = predicted[index_elem]
sgt = explanation_module.get_sgt(network, elem_background,
elem_answers)
if len(set(sgt)) > 1:
covered += 1
rank, _ = explanation_module.get_rank(elem_background,
wh[0][0], rh[elem_predicted.item() + 1][0])
best_prediction = sgt[rank[0] - 1]
best_correct += (elem_predicted == best_prediction).sum(
).item()
worst_prediction = sgt[rank[-1] - 1]
worst_correct += (elem_predicted == worst_prediction).sum(
).item()
best_accuracy = float(best_correct / covered
) if best_correct > 0 else 0
worst_accuracy = float(worst_correct / covered
) if worst_correct > 0 else 0
pbar.set_postfix({'Best': best_accuracy, 'Worst': worst_accuracy,
'cov': covered / total})
pbar.update()
pbar.close()
return best_accuracy, worst_accuracy
def run_training_epoch(network, data_iterator, loss_function, optimizer,
max_grad_norm):
network.train()
accuracy = 0
correct = 0
total = 0
losses = []
pbar = tqdm()
pbar.reset(total=len(data_iterator))
for _, data in enumerate(data_iterator):
optimizer.zero_grad()
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1
story = torch.cat((p1, p2, p3, p4), 1)
outcome, _, _ = network(story, [a1, a2])
predicted = torch.argmax(outcome, 1)
loss = loss_function(outcome, y)
loss.backward()
losses.append(loss.item())
correct += (predicted == y).sum().item()
total += y.size(0)
accuracy = float(correct / total) if correct > 0 else 0
nn.utils.clip_grad_norm_(network.parameters(), max_norm=max_grad_norm)
optimizer.step()
pbar.set_postfix({'Acc': accuracy})
pbar.update()
pbar.close()
return accuracy, np.mean(losses)
def run_val_epoch(network, data_iterator):
network.eval()
accuracy = 0
correct = 0
total = 0
pbar = tqdm()
pbar.reset(total=len(data_iterator))
with torch.no_grad():
for _, data in enumerate(data_iterator):
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1
story = torch.cat((p1, p2, p3, p4), 1)
outcome, _, _ = network(story, [a1, a2])
predicted = torch.argmax(outcome, 1)
correct += (predicted == y).sum().item()
total += y.size(0)
accuracy = float(correct / total) if correct > 0 else 0
pbar.set_postfix({'Acc': accuracy})
pbar.update()
pbar.close()
return accuracy
def run_training(path_training, path_val, path_model, top_k,
required_explanation):
config_dict = yaml.safe_load(open('config.yaml', 'r'))
controller_config = ControllerConfig(**config_dict['controller'])
memory_config = MemoryConfig(**config_dict['memory'])
training_parameters = TrainingConfig(**config_dict['training'])
DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
train_dataset = functions.get_cloze_dataset(path_training)
val_dataset = functions.get_cloze_dataset(path_val)
train_iterator = torchtext.data.Iterator(train_dataset, batch_size=
training_parameters.batch_size, train=True, shuffle=True, device=DEVICE
)
val_iterator = torchtext.data.Iterator(val_dataset, batch_size=
training_parameters.batch_size, train=False, sort=False, device=DEVICE)
vocab = torch.load('dataset/vocab')['vocab']
embedding_pretrained_weights = vocab.vectors
pre_trained_embeddings = torch.as_tensor(embedding_pretrained_weights).to(
DEVICE)
padding_index = 1
embedding_dim = len(embedding_pretrained_weights[0])
network = model.ClozeModel(controller_config, memory_config,
embedding_dim, len(pre_trained_embeddings), dropout=
training_parameters.dropout).to(DEVICE)
network.embeddings.weight.data.copy_(pre_trained_embeddings)
network.embeddings.weight.requires_grad = True
explanation_mod = core.dnc.explanation.ExplanationModule(padding_value=
padding_index, top_k=top_k)
loss_function = nn.CrossEntropyLoss()
optimizer = optim.Adam(network.parameters(), lr=training_parameters.
learning_rate, eps=1e-07)
top1_acc = 0.0
for epoch in range(1, 11):
print('Running epoch {}'.format(epoch))
_, _ = run_training_epoch(network, train_iterator, loss_function,
optimizer, training_parameters.max_grad_norm)
print('Validation epoch {}'.format(epoch))
accuracy = run_val_epoch(network, val_iterator)
if required_explanation:
print('Explaining training dataset')
run_explanations(network, explanation_mod, train_iterator)
print('Explain validation dataset')
run_explanations(network, explanation_mod, val_iterator)
if accuracy > top1_acc:
top1_acc = accuracy
print('saving model...')
checkpoint = {'controller_config': config_dict['controller'],
'memory_config': config_dict['memory'], 'state_dict':
network.state_dict(), 'len_embeddings': len(
pre_trained_embeddings)}
torch.save(checkpoint, path_model)
def main(argv):
path_model = FLAGS.path_model
path_training = FLAGS.path_training
path_val = FLAGS.path_val
top_k = FLAGS.top_k
use_surr = FLAGS.use_surrogate
run_training(path_training, path_val, path_model, top_k, use_surr)
print('Training process ended! The new model is stored on {}.'.format(
path_model))
if __name__ == '__main__':
absl.app.run(main)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
FLAGS = absl.flags.FLAGS
absl.flags.DEFINE_string('path_model', None, 'Path of the trained model')
absl.flags.DEFINE_string('path_training', None,
    'Path where the training csv dataset is stored')
absl.flags.DEFINE_string('path_val', None,
    'Path where the validation csv dataset is stored')
absl.flags.DEFINE_integer('top_k', 25,
    'Number of read cells considered for each step')
absl.flags.DEFINE_boolean('use_surrogate', False,
    'Whether to extract surrogate ground truth for explanation')
absl.flags.mark_flag_as_required('path_model')
absl.flags.mark_flag_as_required('path_training')
absl.flags.mark_flag_as_required('path_val')
def run_explanations(network, explanation_module, data_iterator):
network.eval()
best_accuracy = 0
worst_accuracy = 0
best_correct = 0
worst_correct = 0
covered = 0
total = 0
pbar = tqdm()
pbar.reset(total=len(data_iterator))
for _, data in enumerate(data_iterator):
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1
story = torch.cat((p1, p2, p3, p4), 1)
background = [p1, p2, p3, p4]
answers = [a1, a2]
total += y.size(0)
with torch.no_grad():
outcome, rh, wh = network(story, [a1, a2])
predicted = torch.argmax(outcome, 1)
for index_elem in range(p1.shape[0]):
elem_background = [p1[index_elem:index_elem + 1, :], p2[
index_elem:index_elem + 1, :], p3[index_elem:index_elem +
1, :], p4[index_elem:index_elem + 1, :]]
elem_answers = [a1[index_elem:index_elem + 1, :], a2[
index_elem:index_elem + 1, :]]
elem_predicted = predicted[index_elem]
sgt = explanation_module.get_sgt(network, elem_background,
elem_answers)
if len(set(sgt)) > 1:
covered += 1
rank, _ = explanation_module.get_rank(elem_background,
wh[0][0], rh[elem_predicted.item() + 1][0])
best_prediction = sgt[rank[0] - 1]
best_correct += (elem_predicted == best_prediction).sum(
).item()
worst_prediction = sgt[rank[-1] - 1]
worst_correct += (elem_predicted == worst_prediction).sum(
).item()
best_accuracy = float(best_correct / covered
) if best_correct > 0 else 0
worst_accuracy = float(worst_correct / covered
) if worst_correct > 0 else 0
pbar.set_postfix({'Best': best_accuracy, 'Worst': worst_accuracy,
'cov': covered / total})
pbar.update()
pbar.close()
return best_accuracy, worst_accuracy
def run_training_epoch(network, data_iterator, loss_function, optimizer,
max_grad_norm):
network.train()
accuracy = 0
correct = 0
total = 0
losses = []
pbar = tqdm()
pbar.reset(total=len(data_iterator))
for _, data in enumerate(data_iterator):
optimizer.zero_grad()
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1
story = torch.cat((p1, p2, p3, p4), 1)
outcome, _, _ = network(story, [a1, a2])
predicted = torch.argmax(outcome, 1)
loss = loss_function(outcome, y)
loss.backward()
losses.append(loss.item())
correct += (predicted == y).sum().item()
total += y.size(0)
accuracy = float(correct / total) if correct > 0 else 0
nn.utils.clip_grad_norm_(network.parameters(), max_norm=max_grad_norm)
optimizer.step()
pbar.set_postfix({'Acc': accuracy})
pbar.update()
pbar.close()
return accuracy, np.mean(losses)
def run_val_epoch(network, data_iterator):
network.eval()
accuracy = 0
correct = 0
total = 0
pbar = tqdm()
pbar.reset(total=len(data_iterator))
with torch.no_grad():
for _, data in enumerate(data_iterator):
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1
story = torch.cat((p1, p2, p3, p4), 1)
outcome, _, _ = network(story, [a1, a2])
predicted = torch.argmax(outcome, 1)
correct += (predicted == y).sum().item()
total += y.size(0)
accuracy = float(correct / total) if correct > 0 else 0
pbar.set_postfix({'Acc': accuracy})
pbar.update()
pbar.close()
return accuracy
def run_training(path_training, path_val, path_model, top_k,
required_explanation):
config_dict = yaml.safe_load(open('config.yaml', 'r'))
controller_config = ControllerConfig(**config_dict['controller'])
memory_config = MemoryConfig(**config_dict['memory'])
training_parameters = TrainingConfig(**config_dict['training'])
DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
train_dataset = functions.get_cloze_dataset(path_training)
val_dataset = functions.get_cloze_dataset(path_val)
train_iterator = torchtext.data.Iterator(train_dataset, batch_size=
training_parameters.batch_size, train=True, shuffle=True, device=DEVICE
)
val_iterator = torchtext.data.Iterator(val_dataset, batch_size=
training_parameters.batch_size, train=False, sort=False, device=DEVICE)
vocab = torch.load('dataset/vocab')['vocab']
embedding_pretrained_weights = vocab.vectors
pre_trained_embeddings = torch.as_tensor(embedding_pretrained_weights).to(
DEVICE)
padding_index = 1
embedding_dim = len(embedding_pretrained_weights[0])
network = model.ClozeModel(controller_config, memory_config,
embedding_dim, len(pre_trained_embeddings), dropout=
training_parameters.dropout).to(DEVICE)
network.embeddings.weight.data.copy_(pre_trained_embeddings)
network.embeddings.weight.requires_grad = True
explanation_mod = core.dnc.explanation.ExplanationModule(padding_value=
padding_index, top_k=top_k)
loss_function = nn.CrossEntropyLoss()
optimizer = optim.Adam(network.parameters(), lr=training_parameters.
learning_rate, eps=1e-07)
top1_acc = 0.0
for epoch in range(1, 11):
print('Running epoch {}'.format(epoch))
_, _ = run_training_epoch(network, train_iterator, loss_function,
optimizer, training_parameters.max_grad_norm)
print('Validation epoch {}'.format(epoch))
accuracy = run_val_epoch(network, val_iterator)
if required_explanation:
print('Explaining training dataset')
run_explanations(network, explanation_mod, train_iterator)
print('Explain validation dataset')
run_explanations(network, explanation_mod, val_iterator)
if accuracy > top1_acc:
top1_acc = accuracy
print('saving model...')
checkpoint = {'controller_config': config_dict['controller'],
'memory_config': config_dict['memory'], 'state_dict':
network.state_dict(), 'len_embeddings': len(
pre_trained_embeddings)}
torch.save(checkpoint, path_model)
def main(argv):
path_model = FLAGS.path_model
path_training = FLAGS.path_training
path_val = FLAGS.path_val
top_k = FLAGS.top_k
use_surr = FLAGS.use_surrogate
run_training(path_training, path_val, path_model, top_k, use_surr)
print('Training process ended! The new model is stored on {}.'.format(
path_model))
if __name__ == '__main__':
absl.app.run(main)
<|reserved_special_token_1|>
import torch
import torch.nn as nn
import torch.optim as optim
import torchtext
import absl.flags
import absl.app
import pickle
import yaml
import numpy as np
from tqdm import tqdm
from core import model
import core.dnc.explanation
from core import functions
from core.config import ControllerConfig, MemoryConfig, TrainingConfig
# user flags
FLAGS = absl.flags.FLAGS
absl.flags.DEFINE_string("path_model", None, "Path of the trained model")
absl.flags.DEFINE_string("path_training", None, "Path where the training csv dataset is stored")
absl.flags.DEFINE_string("path_val", None, "Path where the validation csv dataset is stored")
absl.flags.DEFINE_integer("top_k", 25, "Number of read cells considered for each step")
absl.flags.DEFINE_boolean("use_surrogate", False, "Whether to extract surrogate ground truth for explanation")
absl.flags.mark_flag_as_required("path_model")
absl.flags.mark_flag_as_required("path_training")
absl.flags.mark_flag_as_required("path_val")
def run_explanations(network, explanation_module, data_iterator):
network.eval()
best_accuracy = 0
worst_accuracy = 0
best_correct = 0
worst_correct = 0
covered = 0
total = 0
#print stuff
pbar = tqdm()
pbar.reset(total=len(data_iterator))
for _, data in enumerate(data_iterator):
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1 # gold index
story = torch.cat((p1,p2,p3,p4),1)
background = [p1,p2,p3,p4]
answers = [a1,a2]
total += y.size(0)
#get output
with torch.no_grad():
outcome, rh, wh = network(story,[a1,a2])
predicted = torch.argmax(outcome, 1)
for index_elem in range(p1.shape[0]):
elem_background = [p1[index_elem:index_elem+1,:], p2[index_elem:index_elem+1,:],p3[index_elem:index_elem+1,:],p4[index_elem:index_elem+1,:]]
elem_answers = [a1[index_elem:index_elem+1,:], a2[index_elem:index_elem+1,:]]
elem_predicted = predicted[index_elem]
sgt = explanation_module.get_sgt(network, elem_background,elem_answers )
            # case where there are contradictory surrogate ground truths
if len(set(sgt)) > 1:
covered += 1
rank, _ = explanation_module.get_rank(elem_background,wh[0][0],rh[elem_predicted.item()+1][0] )
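                # rank holds 1-based indices of the four background passages
                # ordered by relevance, so sgt[rank[0] - 1] is the surrogate
                # label of the most relevant passage and sgt[rank[-1] - 1] of
                # the least relevant; matching them against the prediction
                # yields the best-case and worst-case explanation accuracies.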
best_prediction = sgt[rank[0]-1]
best_correct += (elem_predicted == best_prediction).sum().item()
worst_prediction = sgt[rank[-1]-1]
worst_correct += (elem_predicted == worst_prediction).sum().item()
best_accuracy = float(best_correct / covered) if best_correct > 0 else 0
worst_accuracy = float(worst_correct / covered) if worst_correct > 0 else 0
#print
pbar.set_postfix({'Best':best_accuracy,'Worst':worst_accuracy,
'cov':covered/total})
pbar.update()
pbar.close()
return best_accuracy, worst_accuracy
def run_training_epoch(network, data_iterator, loss_function, optimizer, max_grad_norm):
network.train()
# init cumulative variables
accuracy = 0
correct = 0
total = 0
losses = []
# print utility
pbar = tqdm()
pbar.reset(total=len(data_iterator))
#data_iterator.init_epoch()
for _, data in enumerate(data_iterator):
optimizer.zero_grad()
(_, p1, p2, p3, p4, a1, a2), y = data
y = y - 1 # gold index
story = torch.cat((p1,p2,p3,p4),1)
# get output
outcome, _, _ = network(story,[a1,a2])
predicted = torch.argmax(outcome, 1)
# get loss
loss = loss_function(outcome,y)
loss.backward()
losses.append(loss.item())
# update metrics
correct += (predicted == y).sum().item()
total += y.size(0)
accuracy = float(correct / total) if correct > 0 else 0
# update weights
nn.utils.clip_grad_norm_(network.parameters(), max_norm=max_grad_norm)
optimizer.step()
pbar.set_postfix({'Acc':accuracy})
#print
pbar.update()
pbar.close()
return accuracy, np.mean(losses)
def run_val_epoch(network, data_iterator):
network.eval()
accuracy = 0
correct = 0
total = 0
pbar = tqdm()
pbar.reset(total=len(data_iterator))
with torch.no_grad():
for _, data in enumerate(data_iterator):
(_,p1, p2, p3, p4, a1, a2), y = data
y = y - 1 # gold index
story = torch.cat((p1,p2,p3,p4),1)
outcome, _, _ = network(story,[a1,a2])
# update metrics
predicted = torch.argmax(outcome, 1)
correct += (predicted == y).sum().item()
total += y.size(0)
accuracy = float(correct / total) if correct > 0 else 0
#print
pbar.set_postfix({'Acc':accuracy})
pbar.update()
pbar.close()
return accuracy
def run_training(path_training, path_val, path_model, top_k, required_explanation):
#get configuration from dict and user
config_dict = yaml.safe_load(open("config.yaml", 'r'))
controller_config = ControllerConfig(**config_dict['controller'])
memory_config = MemoryConfig(**config_dict['memory'])
training_parameters = TrainingConfig(**config_dict['training'])
# get available device
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
train_dataset = functions.get_cloze_dataset(path_training)
val_dataset = functions.get_cloze_dataset(path_val)
train_iterator = torchtext.data.Iterator(train_dataset,batch_size=training_parameters.batch_size, train=True, shuffle=True, device=DEVICE)
val_iterator = torchtext.data.Iterator(val_dataset,batch_size=training_parameters.batch_size, train=False, sort=False,device=DEVICE)
# Get Embedding
vocab = torch.load("dataset/vocab")['vocab']
embedding_pretrained_weights = vocab.vectors
pre_trained_embeddings = torch.as_tensor(embedding_pretrained_weights).to(DEVICE)
padding_index=1
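    # index 1 is typically torchtext's <pad> position (after <unk> at 0); the
    # same value is handed to the explanation module as padding_value below.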
embedding_dim = len(embedding_pretrained_weights[0])
#init model
network = model.ClozeModel(controller_config, memory_config, embedding_dim,len(pre_trained_embeddings),dropout=training_parameters.dropout).to(DEVICE)
network.embeddings.weight.data.copy_(pre_trained_embeddings)
network.embeddings.weight.requires_grad = True
explanation_mod = core.dnc.explanation.ExplanationModule(padding_value=padding_index,top_k=top_k)
loss_function = nn.CrossEntropyLoss()
optimizer = optim.Adam(network.parameters(), lr=training_parameters.learning_rate, eps=1e-7)
# initialize variables
top1_acc = 0.0
for epoch in range(1,11):
print("Running epoch {}".format(epoch))
_,_ = run_training_epoch(network,train_iterator,loss_function,optimizer,training_parameters.max_grad_norm)
print("Validation epoch {}".format(epoch))
accuracy = run_val_epoch(network,val_iterator)
if required_explanation:
print("Explaining training dataset")
run_explanations(network,explanation_mod,train_iterator)
print("Explain validation dataset")
run_explanations(network,explanation_mod,val_iterator)
if accuracy > top1_acc:
top1_acc = accuracy
print("saving model...")
checkpoint = {'controller_config':config_dict['controller'], 'memory_config':config_dict['memory'],
'state_dict':network.state_dict(), 'len_embeddings':len(pre_trained_embeddings)}
torch.save(checkpoint, path_model)
def main(argv):
path_model = FLAGS.path_model
path_training = FLAGS.path_training
path_val = FLAGS.path_val
top_k = FLAGS.top_k
use_surr = FLAGS.use_surrogate
run_training(path_training,path_val, path_model, top_k, use_surr)
print("Training process ended! The new model is stored on {}.".format(path_model))
if __name__ == '__main__':
absl.app.run(main)
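
run_training() reads a config.yaml with three top-level sections feeding the
ControllerConfig, MemoryConfig and TrainingConfig config classes. Only the
training keys are visible from this script (batch_size, dropout,
learning_rate and max_grad_norm are dereferenced above); the controller and
memory field names in the sketch below are illustrative assumptions, not the
real schema.

# Hypothetical config.yaml contents, shown as the dict yaml.safe_load returns.
example_config = {
    'controller': {'hidden_size': 256},   # assumed field name and value
    'memory': {'memory_size': 128},       # assumed field name and value
    'training': {                         # keys actually used by this script
        'batch_size': 32,
        'dropout': 0.1,
        'learning_rate': 1e-4,
        'max_grad_norm': 10.0,
    },
}

# Invocation via the absl flags defined at the top (script name assumed):
#   python train_cloze.py --path_model=out/model.pt \
#       --path_training=dataset/train.csv --path_val=dataset/val.csv \
#       --top_k=25 --use_surrogate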
|
flexible
|
{
"blob_id": "00dbcae2d3941c9ef4c8b6753b8f6f7a46417400",
"index": 5110,
"step-1": "<mask token>\n\n\ndef run_explanations(network, explanation_module, data_iterator):\n network.eval()\n best_accuracy = 0\n worst_accuracy = 0\n best_correct = 0\n worst_correct = 0\n covered = 0\n total = 0\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n for _, data in enumerate(data_iterator):\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1\n story = torch.cat((p1, p2, p3, p4), 1)\n background = [p1, p2, p3, p4]\n answers = [a1, a2]\n total += y.size(0)\n with torch.no_grad():\n outcome, rh, wh = network(story, [a1, a2])\n predicted = torch.argmax(outcome, 1)\n for index_elem in range(p1.shape[0]):\n elem_background = [p1[index_elem:index_elem + 1, :], p2[\n index_elem:index_elem + 1, :], p3[index_elem:index_elem +\n 1, :], p4[index_elem:index_elem + 1, :]]\n elem_answers = [a1[index_elem:index_elem + 1, :], a2[\n index_elem:index_elem + 1, :]]\n elem_predicted = predicted[index_elem]\n sgt = explanation_module.get_sgt(network, elem_background,\n elem_answers)\n if len(set(sgt)) > 1:\n covered += 1\n rank, _ = explanation_module.get_rank(elem_background,\n wh[0][0], rh[elem_predicted.item() + 1][0])\n best_prediction = sgt[rank[0] - 1]\n best_correct += (elem_predicted == best_prediction).sum(\n ).item()\n worst_prediction = sgt[rank[-1] - 1]\n worst_correct += (elem_predicted == worst_prediction).sum(\n ).item()\n best_accuracy = float(best_correct / covered\n ) if best_correct > 0 else 0\n worst_accuracy = float(worst_correct / covered\n ) if worst_correct > 0 else 0\n pbar.set_postfix({'Best': best_accuracy, 'Worst': worst_accuracy,\n 'cov': covered / total})\n pbar.update()\n pbar.close()\n return best_accuracy, worst_accuracy\n\n\n<mask token>\n\n\ndef run_val_epoch(network, data_iterator):\n network.eval()\n accuracy = 0\n correct = 0\n total = 0\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n with torch.no_grad():\n for _, data in enumerate(data_iterator):\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1\n story = torch.cat((p1, p2, p3, p4), 1)\n outcome, _, _ = network(story, [a1, a2])\n predicted = torch.argmax(outcome, 1)\n correct += (predicted == y).sum().item()\n total += y.size(0)\n accuracy = float(correct / total) if correct > 0 else 0\n pbar.set_postfix({'Acc': accuracy})\n pbar.update()\n pbar.close()\n return accuracy\n\n\n<mask token>\n\n\ndef main(argv):\n path_model = FLAGS.path_model\n path_training = FLAGS.path_training\n path_val = FLAGS.path_val\n top_k = FLAGS.top_k\n use_surr = FLAGS.use_surrogate\n run_training(path_training, path_val, path_model, top_k, use_surr)\n print('Training process ended! The new model is stored on {}.'.format(\n path_model))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef run_explanations(network, explanation_module, data_iterator):\n network.eval()\n best_accuracy = 0\n worst_accuracy = 0\n best_correct = 0\n worst_correct = 0\n covered = 0\n total = 0\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n for _, data in enumerate(data_iterator):\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1\n story = torch.cat((p1, p2, p3, p4), 1)\n background = [p1, p2, p3, p4]\n answers = [a1, a2]\n total += y.size(0)\n with torch.no_grad():\n outcome, rh, wh = network(story, [a1, a2])\n predicted = torch.argmax(outcome, 1)\n for index_elem in range(p1.shape[0]):\n elem_background = [p1[index_elem:index_elem + 1, :], p2[\n index_elem:index_elem + 1, :], p3[index_elem:index_elem +\n 1, :], p4[index_elem:index_elem + 1, :]]\n elem_answers = [a1[index_elem:index_elem + 1, :], a2[\n index_elem:index_elem + 1, :]]\n elem_predicted = predicted[index_elem]\n sgt = explanation_module.get_sgt(network, elem_background,\n elem_answers)\n if len(set(sgt)) > 1:\n covered += 1\n rank, _ = explanation_module.get_rank(elem_background,\n wh[0][0], rh[elem_predicted.item() + 1][0])\n best_prediction = sgt[rank[0] - 1]\n best_correct += (elem_predicted == best_prediction).sum(\n ).item()\n worst_prediction = sgt[rank[-1] - 1]\n worst_correct += (elem_predicted == worst_prediction).sum(\n ).item()\n best_accuracy = float(best_correct / covered\n ) if best_correct > 0 else 0\n worst_accuracy = float(worst_correct / covered\n ) if worst_correct > 0 else 0\n pbar.set_postfix({'Best': best_accuracy, 'Worst': worst_accuracy,\n 'cov': covered / total})\n pbar.update()\n pbar.close()\n return best_accuracy, worst_accuracy\n\n\ndef run_training_epoch(network, data_iterator, loss_function, optimizer,\n max_grad_norm):\n network.train()\n accuracy = 0\n correct = 0\n total = 0\n losses = []\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n for _, data in enumerate(data_iterator):\n optimizer.zero_grad()\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1\n story = torch.cat((p1, p2, p3, p4), 1)\n outcome, _, _ = network(story, [a1, a2])\n predicted = torch.argmax(outcome, 1)\n loss = loss_function(outcome, y)\n loss.backward()\n losses.append(loss.item())\n correct += (predicted == y).sum().item()\n total += y.size(0)\n accuracy = float(correct / total) if correct > 0 else 0\n nn.utils.clip_grad_norm_(network.parameters(), max_norm=max_grad_norm)\n optimizer.step()\n pbar.set_postfix({'Acc': accuracy})\n pbar.update()\n pbar.close()\n return accuracy, np.mean(losses)\n\n\ndef run_val_epoch(network, data_iterator):\n network.eval()\n accuracy = 0\n correct = 0\n total = 0\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n with torch.no_grad():\n for _, data in enumerate(data_iterator):\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1\n story = torch.cat((p1, p2, p3, p4), 1)\n outcome, _, _ = network(story, [a1, a2])\n predicted = torch.argmax(outcome, 1)\n correct += (predicted == y).sum().item()\n total += y.size(0)\n accuracy = float(correct / total) if correct > 0 else 0\n pbar.set_postfix({'Acc': accuracy})\n pbar.update()\n pbar.close()\n return accuracy\n\n\ndef run_training(path_training, path_val, path_model, top_k,\n required_explanation):\n config_dict = yaml.safe_load(open('config.yaml', 'r'))\n controller_config = ControllerConfig(**config_dict['controller'])\n memory_config = MemoryConfig(**config_dict['memory'])\n training_parameters = TrainingConfig(**config_dict['training'])\n DEVICE = torch.device('cuda' if 
torch.cuda.is_available() else 'cpu')\n train_dataset = functions.get_cloze_dataset(path_training)\n val_dataset = functions.get_cloze_dataset(path_val)\n train_iterator = torchtext.data.Iterator(train_dataset, batch_size=\n training_parameters.batch_size, train=True, shuffle=True, device=DEVICE\n )\n val_iterator = torchtext.data.Iterator(val_dataset, batch_size=\n training_parameters.batch_size, train=False, sort=False, device=DEVICE)\n vocab = torch.load('dataset/vocab')['vocab']\n embedding_pretrained_weights = vocab.vectors\n pre_trained_embeddings = torch.as_tensor(embedding_pretrained_weights).to(\n DEVICE)\n padding_index = 1\n embedding_dim = len(embedding_pretrained_weights[0])\n network = model.ClozeModel(controller_config, memory_config,\n embedding_dim, len(pre_trained_embeddings), dropout=\n training_parameters.dropout).to(DEVICE)\n network.embeddings.weight.data.copy_(pre_trained_embeddings)\n network.embeddings.weight.requires_grad = True\n explanation_mod = core.dnc.explanation.ExplanationModule(padding_value=\n padding_index, top_k=top_k)\n loss_function = nn.CrossEntropyLoss()\n optimizer = optim.Adam(network.parameters(), lr=training_parameters.\n learning_rate, eps=1e-07)\n top1_acc = 0.0\n for epoch in range(1, 11):\n print('Running epoch {}'.format(epoch))\n _, _ = run_training_epoch(network, train_iterator, loss_function,\n optimizer, training_parameters.max_grad_norm)\n print('Validation epoch {}'.format(epoch))\n accuracy = run_val_epoch(network, val_iterator)\n if required_explanation:\n print('Explaining training dataset')\n run_explanations(network, explanation_mod, train_iterator)\n print('Explain validation dataset')\n run_explanations(network, explanation_mod, val_iterator)\n if accuracy > top1_acc:\n top1_acc = accuracy\n print('saving model...')\n checkpoint = {'controller_config': config_dict['controller'],\n 'memory_config': config_dict['memory'], 'state_dict':\n network.state_dict(), 'len_embeddings': len(\n pre_trained_embeddings)}\n torch.save(checkpoint, path_model)\n\n\ndef main(argv):\n path_model = FLAGS.path_model\n path_training = FLAGS.path_training\n path_val = FLAGS.path_val\n top_k = FLAGS.top_k\n use_surr = FLAGS.use_surrogate\n run_training(path_training, path_val, path_model, top_k, use_surr)\n print('Training process ended! The new model is stored on {}.'.format(\n path_model))\n\n\n<mask token>\n",
"step-3": "<mask token>\nabsl.flags.DEFINE_string('path_model', None, 'Path of the trained model')\nabsl.flags.DEFINE_string('path_training', None,\n 'Path where is stored the csv dataset')\nabsl.flags.DEFINE_string('path_val', None,\n 'Path where is stored the csv dataset')\nabsl.flags.DEFINE_integer('top_k', 25,\n 'Number of read cells considered for each step')\nabsl.flags.DEFINE_boolean('use_surrogate', False,\n ' Whether to extract surrogate ground truth for explanation')\nabsl.flags.mark_flag_as_required('path_model')\nabsl.flags.mark_flag_as_required('path_training')\nabsl.flags.mark_flag_as_required('path_val')\n\n\ndef run_explanations(network, explanation_module, data_iterator):\n network.eval()\n best_accuracy = 0\n worst_accuracy = 0\n best_correct = 0\n worst_correct = 0\n covered = 0\n total = 0\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n for _, data in enumerate(data_iterator):\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1\n story = torch.cat((p1, p2, p3, p4), 1)\n background = [p1, p2, p3, p4]\n answers = [a1, a2]\n total += y.size(0)\n with torch.no_grad():\n outcome, rh, wh = network(story, [a1, a2])\n predicted = torch.argmax(outcome, 1)\n for index_elem in range(p1.shape[0]):\n elem_background = [p1[index_elem:index_elem + 1, :], p2[\n index_elem:index_elem + 1, :], p3[index_elem:index_elem +\n 1, :], p4[index_elem:index_elem + 1, :]]\n elem_answers = [a1[index_elem:index_elem + 1, :], a2[\n index_elem:index_elem + 1, :]]\n elem_predicted = predicted[index_elem]\n sgt = explanation_module.get_sgt(network, elem_background,\n elem_answers)\n if len(set(sgt)) > 1:\n covered += 1\n rank, _ = explanation_module.get_rank(elem_background,\n wh[0][0], rh[elem_predicted.item() + 1][0])\n best_prediction = sgt[rank[0] - 1]\n best_correct += (elem_predicted == best_prediction).sum(\n ).item()\n worst_prediction = sgt[rank[-1] - 1]\n worst_correct += (elem_predicted == worst_prediction).sum(\n ).item()\n best_accuracy = float(best_correct / covered\n ) if best_correct > 0 else 0\n worst_accuracy = float(worst_correct / covered\n ) if worst_correct > 0 else 0\n pbar.set_postfix({'Best': best_accuracy, 'Worst': worst_accuracy,\n 'cov': covered / total})\n pbar.update()\n pbar.close()\n return best_accuracy, worst_accuracy\n\n\ndef run_training_epoch(network, data_iterator, loss_function, optimizer,\n max_grad_norm):\n network.train()\n accuracy = 0\n correct = 0\n total = 0\n losses = []\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n for _, data in enumerate(data_iterator):\n optimizer.zero_grad()\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1\n story = torch.cat((p1, p2, p3, p4), 1)\n outcome, _, _ = network(story, [a1, a2])\n predicted = torch.argmax(outcome, 1)\n loss = loss_function(outcome, y)\n loss.backward()\n losses.append(loss.item())\n correct += (predicted == y).sum().item()\n total += y.size(0)\n accuracy = float(correct / total) if correct > 0 else 0\n nn.utils.clip_grad_norm_(network.parameters(), max_norm=max_grad_norm)\n optimizer.step()\n pbar.set_postfix({'Acc': accuracy})\n pbar.update()\n pbar.close()\n return accuracy, np.mean(losses)\n\n\ndef run_val_epoch(network, data_iterator):\n network.eval()\n accuracy = 0\n correct = 0\n total = 0\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n with torch.no_grad():\n for _, data in enumerate(data_iterator):\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1\n story = torch.cat((p1, p2, p3, p4), 1)\n outcome, _, _ = network(story, [a1, a2])\n predicted = torch.argmax(outcome, 
1)\n correct += (predicted == y).sum().item()\n total += y.size(0)\n accuracy = float(correct / total) if correct > 0 else 0\n pbar.set_postfix({'Acc': accuracy})\n pbar.update()\n pbar.close()\n return accuracy\n\n\ndef run_training(path_training, path_val, path_model, top_k,\n required_explanation):\n config_dict = yaml.safe_load(open('config.yaml', 'r'))\n controller_config = ControllerConfig(**config_dict['controller'])\n memory_config = MemoryConfig(**config_dict['memory'])\n training_parameters = TrainingConfig(**config_dict['training'])\n DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n train_dataset = functions.get_cloze_dataset(path_training)\n val_dataset = functions.get_cloze_dataset(path_val)\n train_iterator = torchtext.data.Iterator(train_dataset, batch_size=\n training_parameters.batch_size, train=True, shuffle=True, device=DEVICE\n )\n val_iterator = torchtext.data.Iterator(val_dataset, batch_size=\n training_parameters.batch_size, train=False, sort=False, device=DEVICE)\n vocab = torch.load('dataset/vocab')['vocab']\n embedding_pretrained_weights = vocab.vectors\n pre_trained_embeddings = torch.as_tensor(embedding_pretrained_weights).to(\n DEVICE)\n padding_index = 1\n embedding_dim = len(embedding_pretrained_weights[0])\n network = model.ClozeModel(controller_config, memory_config,\n embedding_dim, len(pre_trained_embeddings), dropout=\n training_parameters.dropout).to(DEVICE)\n network.embeddings.weight.data.copy_(pre_trained_embeddings)\n network.embeddings.weight.requires_grad = True\n explanation_mod = core.dnc.explanation.ExplanationModule(padding_value=\n padding_index, top_k=top_k)\n loss_function = nn.CrossEntropyLoss()\n optimizer = optim.Adam(network.parameters(), lr=training_parameters.\n learning_rate, eps=1e-07)\n top1_acc = 0.0\n for epoch in range(1, 11):\n print('Running epoch {}'.format(epoch))\n _, _ = run_training_epoch(network, train_iterator, loss_function,\n optimizer, training_parameters.max_grad_norm)\n print('Validation epoch {}'.format(epoch))\n accuracy = run_val_epoch(network, val_iterator)\n if required_explanation:\n print('Explaining training dataset')\n run_explanations(network, explanation_mod, train_iterator)\n print('Explain validation dataset')\n run_explanations(network, explanation_mod, val_iterator)\n if accuracy > top1_acc:\n top1_acc = accuracy\n print('saving model...')\n checkpoint = {'controller_config': config_dict['controller'],\n 'memory_config': config_dict['memory'], 'state_dict':\n network.state_dict(), 'len_embeddings': len(\n pre_trained_embeddings)}\n torch.save(checkpoint, path_model)\n\n\ndef main(argv):\n path_model = FLAGS.path_model\n path_training = FLAGS.path_training\n path_val = FLAGS.path_val\n top_k = FLAGS.top_k\n use_surr = FLAGS.use_surrogate\n run_training(path_training, path_val, path_model, top_k, use_surr)\n print('Training process ended! The new model is stored on {}.'.format(\n path_model))\n\n\nif __name__ == '__main__':\n absl.app.run(main)\n",
"step-4": "<mask token>\nFLAGS = absl.flags.FLAGS\nabsl.flags.DEFINE_string('path_model', None, 'Path of the trained model')\nabsl.flags.DEFINE_string('path_training', None,\n 'Path where is stored the csv dataset')\nabsl.flags.DEFINE_string('path_val', None,\n 'Path where is stored the csv dataset')\nabsl.flags.DEFINE_integer('top_k', 25,\n 'Number of read cells considered for each step')\nabsl.flags.DEFINE_boolean('use_surrogate', False,\n ' Whether to extract surrogate ground truth for explanation')\nabsl.flags.mark_flag_as_required('path_model')\nabsl.flags.mark_flag_as_required('path_training')\nabsl.flags.mark_flag_as_required('path_val')\n\n\ndef run_explanations(network, explanation_module, data_iterator):\n network.eval()\n best_accuracy = 0\n worst_accuracy = 0\n best_correct = 0\n worst_correct = 0\n covered = 0\n total = 0\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n for _, data in enumerate(data_iterator):\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1\n story = torch.cat((p1, p2, p3, p4), 1)\n background = [p1, p2, p3, p4]\n answers = [a1, a2]\n total += y.size(0)\n with torch.no_grad():\n outcome, rh, wh = network(story, [a1, a2])\n predicted = torch.argmax(outcome, 1)\n for index_elem in range(p1.shape[0]):\n elem_background = [p1[index_elem:index_elem + 1, :], p2[\n index_elem:index_elem + 1, :], p3[index_elem:index_elem +\n 1, :], p4[index_elem:index_elem + 1, :]]\n elem_answers = [a1[index_elem:index_elem + 1, :], a2[\n index_elem:index_elem + 1, :]]\n elem_predicted = predicted[index_elem]\n sgt = explanation_module.get_sgt(network, elem_background,\n elem_answers)\n if len(set(sgt)) > 1:\n covered += 1\n rank, _ = explanation_module.get_rank(elem_background,\n wh[0][0], rh[elem_predicted.item() + 1][0])\n best_prediction = sgt[rank[0] - 1]\n best_correct += (elem_predicted == best_prediction).sum(\n ).item()\n worst_prediction = sgt[rank[-1] - 1]\n worst_correct += (elem_predicted == worst_prediction).sum(\n ).item()\n best_accuracy = float(best_correct / covered\n ) if best_correct > 0 else 0\n worst_accuracy = float(worst_correct / covered\n ) if worst_correct > 0 else 0\n pbar.set_postfix({'Best': best_accuracy, 'Worst': worst_accuracy,\n 'cov': covered / total})\n pbar.update()\n pbar.close()\n return best_accuracy, worst_accuracy\n\n\ndef run_training_epoch(network, data_iterator, loss_function, optimizer,\n max_grad_norm):\n network.train()\n accuracy = 0\n correct = 0\n total = 0\n losses = []\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n for _, data in enumerate(data_iterator):\n optimizer.zero_grad()\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1\n story = torch.cat((p1, p2, p3, p4), 1)\n outcome, _, _ = network(story, [a1, a2])\n predicted = torch.argmax(outcome, 1)\n loss = loss_function(outcome, y)\n loss.backward()\n losses.append(loss.item())\n correct += (predicted == y).sum().item()\n total += y.size(0)\n accuracy = float(correct / total) if correct > 0 else 0\n nn.utils.clip_grad_norm_(network.parameters(), max_norm=max_grad_norm)\n optimizer.step()\n pbar.set_postfix({'Acc': accuracy})\n pbar.update()\n pbar.close()\n return accuracy, np.mean(losses)\n\n\ndef run_val_epoch(network, data_iterator):\n network.eval()\n accuracy = 0\n correct = 0\n total = 0\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n with torch.no_grad():\n for _, data in enumerate(data_iterator):\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1\n story = torch.cat((p1, p2, p3, p4), 1)\n outcome, _, _ = network(story, [a1, a2])\n predicted 
= torch.argmax(outcome, 1)\n correct += (predicted == y).sum().item()\n total += y.size(0)\n accuracy = float(correct / total) if correct > 0 else 0\n pbar.set_postfix({'Acc': accuracy})\n pbar.update()\n pbar.close()\n return accuracy\n\n\ndef run_training(path_training, path_val, path_model, top_k,\n required_explanation):\n config_dict = yaml.safe_load(open('config.yaml', 'r'))\n controller_config = ControllerConfig(**config_dict['controller'])\n memory_config = MemoryConfig(**config_dict['memory'])\n training_parameters = TrainingConfig(**config_dict['training'])\n DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n train_dataset = functions.get_cloze_dataset(path_training)\n val_dataset = functions.get_cloze_dataset(path_val)\n train_iterator = torchtext.data.Iterator(train_dataset, batch_size=\n training_parameters.batch_size, train=True, shuffle=True, device=DEVICE\n )\n val_iterator = torchtext.data.Iterator(val_dataset, batch_size=\n training_parameters.batch_size, train=False, sort=False, device=DEVICE)\n vocab = torch.load('dataset/vocab')['vocab']\n embedding_pretrained_weights = vocab.vectors\n pre_trained_embeddings = torch.as_tensor(embedding_pretrained_weights).to(\n DEVICE)\n padding_index = 1\n embedding_dim = len(embedding_pretrained_weights[0])\n network = model.ClozeModel(controller_config, memory_config,\n embedding_dim, len(pre_trained_embeddings), dropout=\n training_parameters.dropout).to(DEVICE)\n network.embeddings.weight.data.copy_(pre_trained_embeddings)\n network.embeddings.weight.requires_grad = True\n explanation_mod = core.dnc.explanation.ExplanationModule(padding_value=\n padding_index, top_k=top_k)\n loss_function = nn.CrossEntropyLoss()\n optimizer = optim.Adam(network.parameters(), lr=training_parameters.\n learning_rate, eps=1e-07)\n top1_acc = 0.0\n for epoch in range(1, 11):\n print('Running epoch {}'.format(epoch))\n _, _ = run_training_epoch(network, train_iterator, loss_function,\n optimizer, training_parameters.max_grad_norm)\n print('Validation epoch {}'.format(epoch))\n accuracy = run_val_epoch(network, val_iterator)\n if required_explanation:\n print('Explaining training dataset')\n run_explanations(network, explanation_mod, train_iterator)\n print('Explain validation dataset')\n run_explanations(network, explanation_mod, val_iterator)\n if accuracy > top1_acc:\n top1_acc = accuracy\n print('saving model...')\n checkpoint = {'controller_config': config_dict['controller'],\n 'memory_config': config_dict['memory'], 'state_dict':\n network.state_dict(), 'len_embeddings': len(\n pre_trained_embeddings)}\n torch.save(checkpoint, path_model)\n\n\ndef main(argv):\n path_model = FLAGS.path_model\n path_training = FLAGS.path_training\n path_val = FLAGS.path_val\n top_k = FLAGS.top_k\n use_surr = FLAGS.use_surrogate\n run_training(path_training, path_val, path_model, top_k, use_surr)\n print('Training process ended! The new model is stored on {}.'.format(\n path_model))\n\n\nif __name__ == '__main__':\n absl.app.run(main)\n",
"step-5": "import torch\nimport torch.nn as nn\nimport torch.optim as optim\nimport torchtext\nimport absl.flags\nimport absl.app\nimport pickle\nimport yaml\nimport numpy as np\nfrom tqdm import tqdm\nfrom core import model\nimport core.dnc.explanation\nfrom core import functions\nfrom core.config import ControllerConfig, MemoryConfig, TrainingConfig\n\n# user flags\nFLAGS = absl.flags.FLAGS\n\nabsl.flags.DEFINE_string(\"path_model\", None, \"Path of the trained model\")\nabsl.flags.DEFINE_string(\"path_training\", None, \"Path where is stored the csv dataset\")\nabsl.flags.DEFINE_string(\"path_val\", None, \"Path where is stored the csv dataset\")\nabsl.flags.DEFINE_integer(\"top_k\", 25, \"Number of read cells considered for each step\")\nabsl.flags.DEFINE_boolean(\"use_surrogate\", False, \" Whether to extract surrogate ground truth for explanation\")\n\nabsl.flags.mark_flag_as_required(\"path_model\")\nabsl.flags.mark_flag_as_required(\"path_training\")\nabsl.flags.mark_flag_as_required(\"path_val\")\n\n\ndef run_explanations(network, explanation_module, data_iterator):\n network.eval()\n best_accuracy = 0\n worst_accuracy = 0\n best_correct = 0\n worst_correct = 0\n covered = 0\n total = 0\n\n #print stuff \n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n\n for _, data in enumerate(data_iterator): \n \n (_, p1, p2, p3, p4, a1, a2), y = data\n\n y = y - 1 # gold index\n story = torch.cat((p1,p2,p3,p4),1)\n background = [p1,p2,p3,p4]\n answers = [a1,a2]\n total += y.size(0)\n #get output \n with torch.no_grad():\n outcome, rh, wh = network(story,[a1,a2])\n predicted = torch.argmax(outcome, 1)\n\n for index_elem in range(p1.shape[0]):\n elem_background = [p1[index_elem:index_elem+1,:], p2[index_elem:index_elem+1,:],p3[index_elem:index_elem+1,:],p4[index_elem:index_elem+1,:]]\n elem_answers = [a1[index_elem:index_elem+1,:], a2[index_elem:index_elem+1,:]]\n elem_predicted = predicted[index_elem]\n sgt = explanation_module.get_sgt(network, elem_background,elem_answers )\n \n # case where there are contraddictory surrogate ground truth\n if len(set(sgt)) > 1:\n covered += 1\n rank, _ = explanation_module.get_rank(elem_background,wh[0][0],rh[elem_predicted.item()+1][0] )\n best_prediction = sgt[rank[0]-1]\n best_correct += (elem_predicted == best_prediction).sum().item()\n worst_prediction = sgt[rank[-1]-1]\n worst_correct += (elem_predicted == worst_prediction).sum().item()\n best_accuracy = float(best_correct / covered) if best_correct > 0 else 0\n worst_accuracy = float(worst_correct / covered) if worst_correct > 0 else 0\n #print\n pbar.set_postfix({'Best':best_accuracy,'Worst':worst_accuracy, \n 'cov':covered/total})\n pbar.update()\n\n pbar.close()\n return best_accuracy, worst_accuracy\n\ndef run_training_epoch(network, data_iterator, loss_function, optimizer, max_grad_norm):\n network.train()\n\n # init cumulative variables\n accuracy = 0\n correct = 0\n total = 0\n losses = []\n\n # print utility\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n\n #data_iterator.init_epoch()\n\n for _, data in enumerate(data_iterator): \n \n optimizer.zero_grad()\n (_, p1, p2, p3, p4, a1, a2), y = data\n y = y - 1 # gold index\n story = torch.cat((p1,p2,p3,p4),1)\n\n # get output\n outcome, _, _ = network(story,[a1,a2])\n predicted = torch.argmax(outcome, 1)\n\n # get loss\n loss = loss_function(outcome,y)\n loss.backward()\n losses.append(loss.item())\n \n # update metrics\n correct += (predicted == y).sum().item()\n total += y.size(0)\n accuracy = float(correct / total) if correct > 
0 else 0 \n \n # update weights\n nn.utils.clip_grad_norm_(network.parameters(), max_norm=max_grad_norm)\n optimizer.step()\n pbar.set_postfix({'Acc':accuracy})\n #print\n pbar.update()\n\n pbar.close()\n return accuracy, np.mean(losses)\n\ndef run_val_epoch(network, data_iterator):\n network.eval()\n\n accuracy = 0\n correct = 0\n total = 0\n pbar = tqdm()\n pbar.reset(total=len(data_iterator))\n\n with torch.no_grad():\n for _, data in enumerate(data_iterator):\n (_,p1, p2, p3, p4, a1, a2), y = data\n\n y = y - 1 # gold index\n story = torch.cat((p1,p2,p3,p4),1)\n\n\n outcome, _, _ = network(story,[a1,a2])\n # update metrics\n predicted = torch.argmax(outcome, 1)\n correct += (predicted == y).sum().item()\n total += y.size(0)\n accuracy = float(correct / total) if correct > 0 else 0 \n\n \n #print\n pbar.set_postfix({'Acc':accuracy})\n pbar.update()\n pbar.close()\n return accuracy\n\ndef run_training(path_training, path_val, path_model, top_k, required_explanation):\n #get configuration from dict and user\n config_dict = yaml.safe_load(open(\"config.yaml\", 'r'))\n controller_config = ControllerConfig(**config_dict['controller'])\n memory_config = MemoryConfig(**config_dict['memory'])\n training_parameters = TrainingConfig(**config_dict['training'])\n\n # get available device\n DEVICE = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n train_dataset = functions.get_cloze_dataset(path_training)\n val_dataset = functions.get_cloze_dataset(path_val)\n\n train_iterator = torchtext.data.Iterator(train_dataset,batch_size=training_parameters.batch_size, train=True, shuffle=True, device=DEVICE)\n val_iterator = torchtext.data.Iterator(val_dataset,batch_size=training_parameters.batch_size, train=False, sort=False,device=DEVICE)\n\n # Get Embedding\n vocab = torch.load(\"dataset/vocab\")['vocab']\n embedding_pretrained_weights = vocab.vectors\n pre_trained_embeddings = torch.as_tensor(embedding_pretrained_weights).to(DEVICE)\n padding_index=1\n embedding_dim = len(embedding_pretrained_weights[0])\n\n\n #init model\n network = model.ClozeModel(controller_config, memory_config, embedding_dim,len(pre_trained_embeddings),dropout=training_parameters.dropout).to(DEVICE)\n network.embeddings.weight.data.copy_(pre_trained_embeddings)\n network.embeddings.weight.requires_grad = True\n \n explanation_mod = core.dnc.explanation.ExplanationModule(padding_value=padding_index,top_k=top_k)\n loss_function = nn.CrossEntropyLoss()\n optimizer = optim.Adam(network.parameters(), lr=training_parameters.learning_rate, eps=1e-7)\n\n # initialize variables\n top1_acc = 0.0\n for epoch in range(1,11):\n print(\"Running epoch {}\".format(epoch))\n _,_ = run_training_epoch(network,train_iterator,loss_function,optimizer,training_parameters.max_grad_norm)\n print(\"Validation epoch {}\".format(epoch))\n accuracy = run_val_epoch(network,val_iterator)\n if required_explanation:\n print(\"Explaining training dataset\")\n run_explanations(network,explanation_mod,train_iterator)\n print(\"Explain validation dataset\")\n run_explanations(network,explanation_mod,val_iterator)\n\n if accuracy > top1_acc:\n top1_acc = accuracy\n print(\"saving model...\")\n checkpoint = {'controller_config':config_dict['controller'], 'memory_config':config_dict['memory'],\n 'state_dict':network.state_dict(), 'len_embeddings':len(pre_trained_embeddings)}\n torch.save(checkpoint, path_model)\n\ndef main(argv):\n path_model = FLAGS.path_model\n path_training = FLAGS.path_training\n path_val = FLAGS.path_val\n top_k = FLAGS.top_k\n 
use_surr = FLAGS.use_surrogate\n run_training(path_training,path_val, path_model, top_k, use_surr)\n print(\"Training process ended! The new model is stored on {}.\".format(path_model))\n\nif __name__ == '__main__':\n absl.app.run(main)",
"step-ids": [
3,
5,
6,
7,
9
]
}
|
[
3,
5,
6,
7,
9
] |
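The training record above saves its best checkpoint as a plain dict ('controller_config', 'memory_config', 'state_dict', 'len_embeddings') but never shows the reload path. A minimal reload sketch, assuming the same `core.config` classes and `model.ClozeModel` constructor used in the script; `embedding_dim` is a caller-supplied value that must match the pretrained embeddings, and `dropout=0.0` is an assumption for inference:

import torch
from core import model
from core.config import ControllerConfig, MemoryConfig


def load_cloze_checkpoint(path_model, embedding_dim, device='cpu'):
    # The script stored raw dicts, so rebuild the config objects first.
    checkpoint = torch.load(path_model, map_location=device)
    controller_config = ControllerConfig(**checkpoint['controller_config'])
    memory_config = MemoryConfig(**checkpoint['memory_config'])
    network = model.ClozeModel(controller_config, memory_config,
                               embedding_dim, checkpoint['len_embeddings'],
                               dropout=0.0).to(device)  # dropout off at inference (assumed default behavior)
    network.load_state_dict(checkpoint['state_dict'])
    network.eval()  # disable training-only layers for evaluation/explanation runs
    return network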
# Write a program that computes the area of a rectangle.
# Area of a rectangle = height * base
height=int(input('Enter the height: '))
base=int(input('Enter the base: '))
area=height*base
print('The height is',height,', the base is',base,', and the area of the rectangle is',area,'.')
|
normal
|
{
"blob_id": "f9b48c1b6489d8981e192838cf1c734e2296ab15",
"index": 9833,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('높이는', height, ' 밑변은', base, '사각형의 면적은', area, '입니다.')\n",
"step-3": "height = int(input('높이 입력: '))\nbase = int(input('밑변 입력: '))\narea = height * base\nprint('높이는', height, ' 밑변은', base, '사각형의 면적은', area, '입니다.')\n",
"step-4": "#사각형의 면적을 구하는 프로그램을 작성하시오,\r\n#사각형의 면적 = 높이*밑변\r\n\r\nheight=int(input('높이 입력: '))\r\nbase=int(input('밑변 입력: '))\r\n\r\narea=height*base\r\n\r\nprint('높이는',height,' 밑변은',base,'사각형의 면적은',area,'입니다.')\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
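A sketch of the same area computation with basic input validation added; the function name and error handling are illustrative, not part of the record:

def rectangle_area(height, base):
    # Area of a rectangle = height * base; reject non-positive sides.
    if height <= 0 or base <= 0:
        raise ValueError('height and base must be positive')
    return height * base


try:
    h = int(input('Enter the height: '))
    b = int(input('Enter the base: '))
    print('The area of the rectangle is', rectangle_area(h, b))
except ValueError as err:
    print('Invalid input:', err)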
<|reserved_special_token_0|>
def predict(theta, X):
probability = sigmoid(X * theta.T)
return [(1 if x >= 0.5 else 0) for x in probability]
def implement_for_LR(X_train, X_test, y_train, y_test, lamda=1):
n = X_train.shape[1]
X_train = np.array(X_train.values)
y_train = np.array(y_train.values)
theta = np.zeros(n)
result = optimize.fmin_tnc(func=costReg, x0=theta, fprime=gradientReg,
args=(X_train, y_train, lamda))
theta_min = np.matrix(result[0])
predictions = predict(theta_min, X_test)
correct = [(1 if a == 1 and b == 1 or a == 0 and b == 0 else 0) for a,
b in zip(predictions, y_test)]
    accuracy = sum(map(int, correct)) / len(correct) * 100
print('accuracy = {0}%'.format(accuracy))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def gradientReg(theta, X, y, lamda):
theta = np.matrix(theta)
X = np.matrix(X)
y = np.matrix(y)
parameters = int(theta.ravel().shape[1])
grad = np.zeros(parameters)
error = sigmoid(X * theta.T) - y
for i in range(parameters):
term = np.multiply(error, X[:, i])
if i == 0:
grad[i] = np.sum(term) / len(X)
else:
            grad[i] = np.sum(term) / len(X) + lamda / len(X) * theta[0, i]
return grad
def predict(theta, X):
probability = sigmoid(X * theta.T)
return [(1 if x >= 0.5 else 0) for x in probability]
def implement_for_LR(X_train, X_test, y_train, y_test, lamda=1):
n = X_train.shape[1]
X_train = np.array(X_train.values)
y_train = np.array(y_train.values)
theta = np.zeros(n)
result = optimize.fmin_tnc(func=costReg, x0=theta, fprime=gradientReg,
args=(X_train, y_train, lamda))
theta_min = np.matrix(result[0])
predictions = predict(theta_min, X_test)
correct = [(1 if a == 1 and b == 1 or a == 0 and b == 0 else 0) for a,
b in zip(predictions, y_test)]
    accuracy = sum(map(int, correct)) / len(correct) * 100
print('accuracy = {0}%'.format(accuracy))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def sigmoid(z):
return 1 / (1 + np.exp(-z))
def costReg(theta, X, y, lamda):
theta = np.matrix(theta)
X = np.matrix(X)
y = np.matrix(y)
first = np.multiply(-y, np.log(sigmoid(X * theta.T)))
second = np.multiply(1 - y, np.log(1 - sigmoid(X * theta.T)))
reg = lamda / (2 * len(X)) * np.sum(np.power(theta[:, 1:theta.shape[1]], 2)
)
return np.sum(first - second) / len(X) + reg
def gradientReg(theta, X, y, lamda):
theta = np.matrix(theta)
X = np.matrix(X)
y = np.matrix(y)
parameters = int(theta.ravel().shape[1])
grad = np.zeros(parameters)
error = sigmoid(X * theta.T) - y
for i in range(parameters):
term = np.multiply(error, X[:, i])
if i == 0:
grad[i] = np.sum(term) / len(X)
else:
            grad[i] = np.sum(term) / len(X) + lamda / len(X) * theta[0, i]
return grad
def predict(theta, X):
probability = sigmoid(X * theta.T)
return [(1 if x >= 0.5 else 0) for x in probability]
def implement_for_LR(X_train, X_test, y_train, y_test, lamda=1):
n = X_train.shape[1]
X_train = np.array(X_train.values)
y_train = np.array(y_train.values)
theta = np.zeros(n)
result = optimize.fmin_tnc(func=costReg, x0=theta, fprime=gradientReg,
args=(X_train, y_train, lamda))
theta_min = np.matrix(result[0])
predictions = predict(theta_min, X_test)
correct = [(1 if a == 1 and b == 1 or a == 0 and b == 0 else 0) for a,
b in zip(predictions, y_test)]
    accuracy = sum(map(int, correct)) / len(correct) * 100
print('accuracy = {0}%'.format(accuracy))
<|reserved_special_token_1|>
import numpy as np
from scipy import optimize
def sigmoid(z):
return 1 / (1 + np.exp(-z))
def costReg(theta, X, y, lamda):
theta = np.matrix(theta)
X = np.matrix(X)
y = np.matrix(y)
first = np.multiply(-y, np.log(sigmoid(X * theta.T)))
second = np.multiply(1 - y, np.log(1 - sigmoid(X * theta.T)))
reg = lamda / (2 * len(X)) * np.sum(np.power(theta[:, 1:theta.shape[1]], 2)
)
return np.sum(first - second) / len(X) + reg
def gradientReg(theta, X, y, lamda):
theta = np.matrix(theta)
X = np.matrix(X)
y = np.matrix(y)
parameters = int(theta.ravel().shape[1])
grad = np.zeros(parameters)
error = sigmoid(X * theta.T) - y
for i in range(parameters):
term = np.multiply(error, X[:, i])
if i == 0:
grad[i] = np.sum(term) / len(X)
else:
            grad[i] = np.sum(term) / len(X) + lamda / len(X) * theta[0, i]
return grad
def predict(theta, X):
probability = sigmoid(X * theta.T)
return [(1 if x >= 0.5 else 0) for x in probability]
def implement_for_LR(X_train, X_test, y_train, y_test, lamda=1):
n = X_train.shape[1]
X_train = np.array(X_train.values)
y_train = np.array(y_train.values)
theta = np.zeros(n)
result = optimize.fmin_tnc(func=costReg, x0=theta, fprime=gradientReg,
args=(X_train, y_train, lamda))
theta_min = np.matrix(result[0])
predictions = predict(theta_min, X_test)
correct = [(1 if a == 1 and b == 1 or a == 0 and b == 0 else 0) for a,
b in zip(predictions, y_test)]
    accuracy = sum(map(int, correct)) / len(correct) * 100
print('accuracy = {0}%'.format(accuracy))
<|reserved_special_token_1|>
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Created by: Vanish
# Created on: 2019/9/25
import numpy as np
from scipy import optimize
def sigmoid(z):
return 1 / (1 + np.exp(-z))
def costReg(theta, X, y, lamda):
theta = np.matrix(theta)
X = np.matrix(X)
y = np.matrix(y)
first = np.multiply(-y, np.log(sigmoid(X * theta.T)))
second = np.multiply((1 - y), np.log(1 - sigmoid(X * theta.T)))
reg = (lamda / (2 * len(X))) * np.sum(np.power(theta[:, 1:theta.shape[1]], 2))
return np.sum(first - second) / len(X) + reg
def gradientReg(theta, X, y, lamda):
theta = np.matrix(theta)
X = np.matrix(X)
y = np.matrix(y)
parameters = int(theta.ravel().shape[1])
grad = np.zeros(parameters)
error = sigmoid(X * theta.T) - y
for i in range(parameters):
term = np.multiply(error, X[:, i])
if (i == 0):
grad[i] = np.sum(term) / len(X)
else:
            grad[i] = (np.sum(term) / len(X)) + ((lamda / len(X)) * theta[0, i])
return grad
def predict(theta, X):
probability = sigmoid(X * theta.T)
return [1 if x >= 0.5 else 0 for x in probability]
def implement_for_LR(X_train, X_test, y_train, y_test,lamda=1):
n = X_train.shape[1]
    # convert to numpy arrays and initialize the parameter array theta
X_train = np.array(X_train.values)
y_train = np.array(y_train.values)
theta = np.zeros(n)
result = optimize.fmin_tnc(func=costReg, x0=theta, fprime=gradientReg, args=(X_train, y_train, lamda))
theta_min = np.matrix(result[0])
predictions = predict(theta_min, X_test)
correct = [1 if ((a == 1 and b == 1) or (a == 0 and b == 0)) else 0 for (a, b) in zip(predictions, y_test)]
    accuracy = sum(map(int, correct)) / len(correct) * 100  # percentage of predictions that are correct
print('accuracy = {0}%'.format(accuracy))
|
flexible
|
{
"blob_id": "991c361043eb1539a80b5e8e1db44bc365e7e639",
"index": 6345,
"step-1": "<mask token>\n\n\ndef predict(theta, X):\n probability = sigmoid(X * theta.T)\n return [(1 if x >= 0.5 else 0) for x in probability]\n\n\ndef implement_for_LR(X_train, X_test, y_train, y_test, lamda=1):\n n = X_train.shape[1]\n X_train = np.array(X_train.values)\n y_train = np.array(y_train.values)\n theta = np.zeros(n)\n result = optimize.fmin_tnc(func=costReg, x0=theta, fprime=gradientReg,\n args=(X_train, y_train, lamda))\n theta_min = np.matrix(result[0])\n predictions = predict(theta_min, X_test)\n correct = [(1 if a == 1 and b == 1 or a == 0 and b == 0 else 0) for a,\n b in zip(predictions, y_test)]\n accuracy = sum(map(int, correct)) % len(correct)\n print('accuracy = {0}%'.format(accuracy))\n",
"step-2": "<mask token>\n\n\ndef gradientReg(theta, X, y, lamda):\n theta = np.matrix(theta)\n X = np.matrix(X)\n y = np.matrix(y)\n parameters = int(theta.ravel().shape[1])\n grad = np.zeros(parameters)\n error = sigmoid(X * theta.T) - y\n for i in range(parameters):\n term = np.multiply(error, X[:, i])\n if i == 0:\n grad[i] = np.sum(term) / len(X)\n else:\n grad[i] = np.sum(term) / len(X) + lamda / len(X) * theta[:, i]\n return grad\n\n\ndef predict(theta, X):\n probability = sigmoid(X * theta.T)\n return [(1 if x >= 0.5 else 0) for x in probability]\n\n\ndef implement_for_LR(X_train, X_test, y_train, y_test, lamda=1):\n n = X_train.shape[1]\n X_train = np.array(X_train.values)\n y_train = np.array(y_train.values)\n theta = np.zeros(n)\n result = optimize.fmin_tnc(func=costReg, x0=theta, fprime=gradientReg,\n args=(X_train, y_train, lamda))\n theta_min = np.matrix(result[0])\n predictions = predict(theta_min, X_test)\n correct = [(1 if a == 1 and b == 1 or a == 0 and b == 0 else 0) for a,\n b in zip(predictions, y_test)]\n accuracy = sum(map(int, correct)) % len(correct)\n print('accuracy = {0}%'.format(accuracy))\n",
"step-3": "<mask token>\n\n\ndef sigmoid(z):\n return 1 / (1 + np.exp(-z))\n\n\ndef costReg(theta, X, y, lamda):\n theta = np.matrix(theta)\n X = np.matrix(X)\n y = np.matrix(y)\n first = np.multiply(-y, np.log(sigmoid(X * theta.T)))\n second = np.multiply(1 - y, np.log(1 - sigmoid(X * theta.T)))\n reg = lamda / (2 * len(X)) * np.sum(np.power(theta[:, 1:theta.shape[1]], 2)\n )\n return np.sum(first - second) / len(X) + reg\n\n\ndef gradientReg(theta, X, y, lamda):\n theta = np.matrix(theta)\n X = np.matrix(X)\n y = np.matrix(y)\n parameters = int(theta.ravel().shape[1])\n grad = np.zeros(parameters)\n error = sigmoid(X * theta.T) - y\n for i in range(parameters):\n term = np.multiply(error, X[:, i])\n if i == 0:\n grad[i] = np.sum(term) / len(X)\n else:\n grad[i] = np.sum(term) / len(X) + lamda / len(X) * theta[:, i]\n return grad\n\n\ndef predict(theta, X):\n probability = sigmoid(X * theta.T)\n return [(1 if x >= 0.5 else 0) for x in probability]\n\n\ndef implement_for_LR(X_train, X_test, y_train, y_test, lamda=1):\n n = X_train.shape[1]\n X_train = np.array(X_train.values)\n y_train = np.array(y_train.values)\n theta = np.zeros(n)\n result = optimize.fmin_tnc(func=costReg, x0=theta, fprime=gradientReg,\n args=(X_train, y_train, lamda))\n theta_min = np.matrix(result[0])\n predictions = predict(theta_min, X_test)\n correct = [(1 if a == 1 and b == 1 or a == 0 and b == 0 else 0) for a,\n b in zip(predictions, y_test)]\n accuracy = sum(map(int, correct)) % len(correct)\n print('accuracy = {0}%'.format(accuracy))\n",
"step-4": "import numpy as np\nfrom scipy import optimize\n\n\ndef sigmoid(z):\n return 1 / (1 + np.exp(-z))\n\n\ndef costReg(theta, X, y, lamda):\n theta = np.matrix(theta)\n X = np.matrix(X)\n y = np.matrix(y)\n first = np.multiply(-y, np.log(sigmoid(X * theta.T)))\n second = np.multiply(1 - y, np.log(1 - sigmoid(X * theta.T)))\n reg = lamda / (2 * len(X)) * np.sum(np.power(theta[:, 1:theta.shape[1]], 2)\n )\n return np.sum(first - second) / len(X) + reg\n\n\ndef gradientReg(theta, X, y, lamda):\n theta = np.matrix(theta)\n X = np.matrix(X)\n y = np.matrix(y)\n parameters = int(theta.ravel().shape[1])\n grad = np.zeros(parameters)\n error = sigmoid(X * theta.T) - y\n for i in range(parameters):\n term = np.multiply(error, X[:, i])\n if i == 0:\n grad[i] = np.sum(term) / len(X)\n else:\n grad[i] = np.sum(term) / len(X) + lamda / len(X) * theta[:, i]\n return grad\n\n\ndef predict(theta, X):\n probability = sigmoid(X * theta.T)\n return [(1 if x >= 0.5 else 0) for x in probability]\n\n\ndef implement_for_LR(X_train, X_test, y_train, y_test, lamda=1):\n n = X_train.shape[1]\n X_train = np.array(X_train.values)\n y_train = np.array(y_train.values)\n theta = np.zeros(n)\n result = optimize.fmin_tnc(func=costReg, x0=theta, fprime=gradientReg,\n args=(X_train, y_train, lamda))\n theta_min = np.matrix(result[0])\n predictions = predict(theta_min, X_test)\n correct = [(1 if a == 1 and b == 1 or a == 0 and b == 0 else 0) for a,\n b in zip(predictions, y_test)]\n accuracy = sum(map(int, correct)) % len(correct)\n print('accuracy = {0}%'.format(accuracy))\n",
"step-5": "#!/usr/bin/env/ python\n# -*- coding:utf-8 -*-\n# Created by: Vanish\n# Created on: 2019/9/25\n\n\nimport numpy as np\nfrom scipy import optimize\n\n\ndef sigmoid(z):\n return 1 / (1 + np.exp(-z))\n\ndef costReg(theta, X, y, lamda):\n theta = np.matrix(theta)\n X = np.matrix(X)\n y = np.matrix(y)\n first = np.multiply(-y, np.log(sigmoid(X * theta.T)))\n second = np.multiply((1 - y), np.log(1 - sigmoid(X * theta.T)))\n reg = (lamda / (2 * len(X))) * np.sum(np.power(theta[:, 1:theta.shape[1]], 2))\n return np.sum(first - second) / len(X) + reg\n\ndef gradientReg(theta, X, y, lamda):\n theta = np.matrix(theta)\n X = np.matrix(X)\n y = np.matrix(y)\n\n parameters = int(theta.ravel().shape[1])\n grad = np.zeros(parameters)\n\n error = sigmoid(X * theta.T) - y\n\n for i in range(parameters):\n term = np.multiply(error, X[:, i])\n\n if (i == 0):\n grad[i] = np.sum(term) / len(X)\n else:\n grad[i] = (np.sum(term) / len(X)) + ((lamda / len(X)) * theta[:, i])\n\n return grad\n\ndef predict(theta, X):\n probability = sigmoid(X * theta.T)\n return [1 if x >= 0.5 else 0 for x in probability]\n\ndef implement_for_LR(X_train, X_test, y_train, y_test,lamda=1):\n n = X_train.shape[1]\n # convert to numpy arrays and initalize the parameter array theta\n X_train = np.array(X_train.values)\n y_train = np.array(y_train.values)\n theta = np.zeros(n)\n\n result = optimize.fmin_tnc(func=costReg, x0=theta, fprime=gradientReg, args=(X_train, y_train, lamda))\n\n theta_min = np.matrix(result[0])\n predictions = predict(theta_min, X_test)\n correct = [1 if ((a == 1 and b == 1) or (a == 0 and b == 0)) else 0 for (a, b) in zip(predictions, y_test)]\n accuracy = (sum(map(int, correct)) % len(correct))\n print('accuracy = {0}%'.format(accuracy))",
"step-ids": [
2,
3,
5,
6,
7
]
}
|
[
2,
3,
5,
6,
7
] |
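For reference, `costReg` and `gradientReg` in the record above implement the standard regularized logistic-regression cost and gradient, with the bias weight \theta_0 left unregularized (the `i == 0` branch), where h_\theta(x) = \sigma(\theta^\top x) = 1/(1 + e^{-\theta^\top x}):

J(\theta) = \frac{1}{m} \sum_{i=1}^{m} \Big[ -y^{(i)} \log h_\theta(x^{(i)}) - \big(1 - y^{(i)}\big) \log\big(1 - h_\theta(x^{(i)})\big) \Big] + \frac{\lambda}{2m} \sum_{j=1}^{n} \theta_j^2

\frac{\partial J}{\partial \theta_0} = \frac{1}{m} \sum_{i=1}^{m} \big(h_\theta(x^{(i)}) - y^{(i)}\big) x_0^{(i)}, \qquad \frac{\partial J}{\partial \theta_j} = \frac{1}{m} \sum_{i=1}^{m} \big(h_\theta(x^{(i)}) - y^{(i)}\big) x_j^{(i)} + \frac{\lambda}{m} \theta_j \quad (j \ge 1)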
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('begin')
imgui.create_context()
<|reserved_special_token_0|>
imgui.get_io().fonts.get_tex_data_as_rgba32()
imgui.new_frame()
imgui.begin('Window', True)
imgui.text('HelloWorld')
imgui.end()
imgui.render()
imgui.end_frame()
print('end')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('begin')
imgui.create_context()
imgui.get_io().display_size = 100, 100
imgui.get_io().fonts.get_tex_data_as_rgba32()
imgui.new_frame()
imgui.begin('Window', True)
imgui.text('HelloWorld')
imgui.end()
imgui.render()
imgui.end_frame()
print('end')
<|reserved_special_token_1|>
import imgui
print('begin')
imgui.create_context()
imgui.get_io().display_size = 100, 100
imgui.get_io().fonts.get_tex_data_as_rgba32()
imgui.new_frame()
imgui.begin('Window', True)
imgui.text('HelloWorld')
imgui.end()
imgui.render()
imgui.end_frame()
print('end')
<|reserved_special_token_1|>
import imgui
print("begin")
imgui.create_context()
imgui.get_io().display_size = 100, 100
imgui.get_io().fonts.get_tex_data_as_rgba32()
imgui.new_frame()
imgui.begin("Window", True)
imgui.text("HelloWorld")
imgui.end()
imgui.render()
imgui.end_frame()
print("end")
|
flexible
|
{
"blob_id": "146cae8f60b908f04bc09b10c4e30693daec89b4",
"index": 6560,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('begin')\nimgui.create_context()\n<mask token>\nimgui.get_io().fonts.get_tex_data_as_rgba32()\nimgui.new_frame()\nimgui.begin('Window', True)\nimgui.text('HelloWorld')\nimgui.end()\nimgui.render()\nimgui.end_frame()\nprint('end')\n",
"step-3": "<mask token>\nprint('begin')\nimgui.create_context()\nimgui.get_io().display_size = 100, 100\nimgui.get_io().fonts.get_tex_data_as_rgba32()\nimgui.new_frame()\nimgui.begin('Window', True)\nimgui.text('HelloWorld')\nimgui.end()\nimgui.render()\nimgui.end_frame()\nprint('end')\n",
"step-4": "import imgui\nprint('begin')\nimgui.create_context()\nimgui.get_io().display_size = 100, 100\nimgui.get_io().fonts.get_tex_data_as_rgba32()\nimgui.new_frame()\nimgui.begin('Window', True)\nimgui.text('HelloWorld')\nimgui.end()\nimgui.render()\nimgui.end_frame()\nprint('end')\n",
"step-5": "import imgui\nprint(\"begin\")\nimgui.create_context()\nimgui.get_io().display_size = 100, 100\nimgui.get_io().fonts.get_tex_data_as_rgba32()\n\n\nimgui.new_frame()\nimgui.begin(\"Window\", True)\nimgui.text(\"HelloWorld\")\nimgui.end()\n\nimgui.render()\nimgui.end_frame()\nprint(\"end\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
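The record above drives Dear ImGui headlessly for exactly one frame by setting io.display_size manually. In an interactive application the same new_frame/render pair runs once per frame against a windowing backend; a sketch assuming the `glfw` package and pyimgui's bundled GLFW integration (framebuffer clearing via OpenGL is omitted for brevity):

import glfw
import imgui
from imgui.integrations.glfw import GlfwRenderer

imgui.create_context()
if not glfw.init():
    raise RuntimeError('glfw failed to initialize')
window = glfw.create_window(640, 480, 'HelloWorld', None, None)
glfw.make_context_current(window)
impl = GlfwRenderer(window)  # feeds display size and input events into imgui's io

while not glfw.window_should_close(window):
    glfw.poll_events()
    impl.process_inputs()
    imgui.new_frame()
    imgui.begin('Window', True)
    imgui.text('HelloWorld')
    imgui.end()
    imgui.render()
    impl.render(imgui.get_draw_data())  # draw this frame's UI
    glfw.swap_buffers(window)

impl.shutdown()
glfw.terminate()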
#!/usr/bin/env python
# coding: utf-8
# In[2]:
print(" sum of n numbers with help of for loop. ")
n = 10
sum = 0
for num in range(0, n+1, 1):
sum = sum+num
print("Output: SUM of first ", n, "numbers is: ", sum )
# In[3]:
print(" sum of n numbers with help of while loop. ")
num = int(input("Enter the value of n: "))
hold = num
sum = 0
if num <= 0:
print("Enter a whole positive number!")
else:
while num > 0:
sum = sum + num
        num = num - 1
# displaying output
print("Sum of first", hold, "natural number is: ",sum)
# In[4]:
print("Take an integer and find whether the number is prime or not")
#input from user
number = int(input("Enter any number: "))
# prime number is always greater than 1
if number > 1:
for i in range(2, number):
if (number % i) == 0:
print(number, "is not a prime number")
break
else: print(number, "is a prime number")
# if the entered number is less than or equal to 1
# then it is not prime number
else: print(number, "is not a prime number")
# In[ ]:
|
normal
|
{
"blob_id": "d3c36ad36c50cd97f2101bc8df99d1961b0ad7ea",
"index": 4078,
"step-1": "<mask token>\n",
"step-2": "print(' sum of n numbers with help of for loop. ')\n<mask token>\nfor num in range(0, n + 1, 1):\n sum = sum + num\nprint('Output: SUM of first ', n, 'numbers is: ', sum)\nprint(' sum of n numbers with help of while loop. ')\n<mask token>\nif num <= 0:\n print('Enter a whole positive number!')\nelse:\n while num > 0:\n sum = sum + num\n num = num - 1\nprint('Sum of first', hold, 'natural number is: ', sum)\nprint('Take an integer and find whether the number is prime or not')\n<mask token>\nif number > 1:\n for i in range(2, number):\n if number % i == 0:\n print(number, 'is not a prime number')\n break\n else:\n print(number, 'is a prime number')\nelse:\n print(number, 'is not a prime number')\n",
"step-3": "print(' sum of n numbers with help of for loop. ')\nn = 10\nsum = 0\nfor num in range(0, n + 1, 1):\n sum = sum + num\nprint('Output: SUM of first ', n, 'numbers is: ', sum)\nprint(' sum of n numbers with help of while loop. ')\nnum = int(input('Enter the value of n: '))\nhold = num\nsum = 0\nif num <= 0:\n print('Enter a whole positive number!')\nelse:\n while num > 0:\n sum = sum + num\n num = num - 1\nprint('Sum of first', hold, 'natural number is: ', sum)\nprint('Take an integer and find whether the number is prime or not')\nnumber = int(input('Enter any number: '))\nif number > 1:\n for i in range(2, number):\n if number % i == 0:\n print(number, 'is not a prime number')\n break\n else:\n print(number, 'is a prime number')\nelse:\n print(number, 'is not a prime number')\n",
"step-4": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[2]:\n\n\nprint(\" sum of n numbers with help of for loop. \")\nn = 10\nsum = 0\nfor num in range(0, n+1, 1):\n sum = sum+num\nprint(\"Output: SUM of first \", n, \"numbers is: \", sum )\n\n\n# In[3]:\n\n\nprint(\" sum of n numbers with help of while loop. \")\nnum = int(input(\"Enter the value of n: \"))\nhold = num \nsum = 0 \n\n\nif num <= 0: \n print(\"Enter a whole positive number!\") \nelse: \n while num > 0: \n sum = sum + num \n num = num - 1;\n # displaying output \nprint(\"Sum of first\", hold, \"natural number is: \",sum)\n\n\n# In[4]:\n\n\nprint(\"Take an integer and find whether the number is prime or not\")\n#input from user\nnumber = int(input(\"Enter any number: \")) \n# prime number is always greater than 1\nif number > 1: \n for i in range(2, number):\n if (number % i) == 0: \n print(number, \"is not a prime number\")\n break \n else: print(number, \"is a prime number\")\n # if the entered number is less than or equal to 1 \n # then it is not prime number \nelse: print(number, \"is not a prime number\")\n\n\n# In[ ]:\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
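Two standard refinements of the record above, shown as a sketch: the closed-form sum of the first n natural numbers, and a primality test that only trials divisors up to the square root (math.isqrt requires Python 3.8+):

import math


def sum_first_n(n):
    # Gauss closed form: 1 + 2 + ... + n = n * (n + 1) / 2
    return n * (n + 1) // 2


def is_prime(number):
    if number <= 1:
        return False
    # any composite n has a divisor no larger than sqrt(n)
    for i in range(2, math.isqrt(number) + 1):
        if number % i == 0:
            return False
    return True


print(sum_first_n(10))  # 55, matching the loop versions above
print(is_prime(97))     # True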
<|reserved_special_token_0|>
class TestGeneral(unittest.TestCase):
def _check_instantiations(self, template, supports_symbolic=True):
default_cls = template[None]
self.assertTrue(template[float] is default_cls)
self.assertTrue(template[AutoDiffXd] is not default_cls)
if supports_symbolic:
self.assertTrue(template[Expression] is not default_cls)
def test_instantiations(self):
self._check_instantiations(Adder_)
self._check_instantiations(AffineSystem_)
self._check_instantiations(ConstantValueSource_)
self._check_instantiations(ConstantVectorSource_)
self._check_instantiations(Demultiplexer_)
self._check_instantiations(DiscreteDerivative_)
self._check_instantiations(DiscreteTimeDelay_)
self._check_instantiations(Gain_)
self._check_instantiations(Integrator_)
self._check_instantiations(LinearSystem_)
self._check_instantiations(LinearTransformDensity_,
supports_symbolic=False)
self._check_instantiations(Multiplexer_)
self._check_instantiations(MultilayerPerceptron_)
self._check_instantiations(PassThrough_)
self._check_instantiations(PortSwitch_)
self._check_instantiations(Saturation_)
self._check_instantiations(SharedPointerSystem_)
self._check_instantiations(Sine_)
self._check_instantiations(StateInterpolatorWithDiscreteDerivative_)
self._check_instantiations(SymbolicVectorSystem_)
self._check_instantiations(TrajectoryAffineSystem_,
supports_symbolic=False)
self._check_instantiations(TrajectoryLinearSystem_,
supports_symbolic=False)
self._check_instantiations(TrajectorySource_)
self._check_instantiations(VectorLogSink_)
self._check_instantiations(WrapToSystem_)
self._check_instantiations(ZeroOrderHold_)
<|reserved_special_token_0|>
def test_linear_affine_system_empty_matrices(self):
def CheckSizes(system, num_states, num_inputs, num_outputs):
self.assertEqual(system.num_continuous_states(), num_states)
self.assertEqual(system.num_inputs(), num_inputs)
self.assertEqual(system.num_outputs(), num_outputs)
system = AffineSystem(y0=[2, 1])
CheckSizes(system, num_states=0, num_inputs=0, num_outputs=2)
system = AffineSystem(D=np.eye(2))
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = LinearSystem(D=np.eye(2))
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = AffineSystem(D=np.eye(2), y0=[1, 2])
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = LinearSystem(B=np.eye(2))
CheckSizes(system, num_states=2, num_inputs=2, num_outputs=0)
def test_linear_system_zero_size(self):
num_x = 0
num_y = 2
num_u = 2
A = np.zeros((num_x, num_x))
B = np.zeros((num_x, num_u))
C = np.zeros((num_y, num_x))
D = np.zeros((num_y, num_u))
self.assertIsNotNone(LinearSystem(A, B, C, D))
@numpy_compare.check_nonsymbolic_types
def test_linear_transform_density(self, T):
dut = LinearTransformDensity_[T](distribution=RandomDistribution.
kGaussian, input_size=3, output_size=3)
w_in = np.array([T(0.5), T(0.1), T(1.5)])
context = dut.CreateDefaultContext()
dut.get_input_port_w_in().FixValue(context, w_in)
self.assertEqual(dut.get_input_port_A().size(), 9)
self.assertEqual(dut.get_input_port_b().size(), 3)
self.assertEqual(dut.get_distribution(), RandomDistribution.kGaussian)
A = np.array([[T(0.5), T(1), T(2)], [T(1), T(2), T(3)], [T(3), T(4),
T(5)]])
dut.FixConstantA(context=context, A=A)
b = np.array([T(1), T(2), T(3)])
dut.FixConstantB(context=context, b=b)
dut.CalcDensity(context=context)
self.assertEqual(dut.get_output_port_w_out().size(), 3)
self.assertEqual(dut.get_output_port_w_out_density().size(), 1)
def test_vector_pass_through(self):
model_value = BasicVector([1.0, 2, 3])
system = PassThrough(vector_size=model_value.size())
context = system.CreateDefaultContext()
system.get_input_port(0).FixValue(context, model_value)
output = system.AllocateOutput()
input_eval = system.EvalVectorInput(context, 0)
compare_value(self, input_eval, model_value)
system.CalcOutput(context, output)
output_value = output.get_vector_data(0)
compare_value(self, output_value, model_value)
def test_default_vector_pass_through(self):
model_value = [1.0, 2, 3]
system = PassThrough(value=model_value)
context = system.CreateDefaultContext()
np.testing.assert_array_equal(model_value, system.get_output_port()
.Eval(context))
def test_abstract_pass_through(self):
model_value = Value('Hello world')
system = PassThrough(abstract_model_value=model_value)
context = system.CreateDefaultContext()
system.get_input_port(0).FixValue(context, model_value)
output = system.AllocateOutput()
input_eval = system.EvalAbstractInput(context, 0)
compare_value(self, input_eval, model_value)
system.CalcOutput(context, output)
output_value = output.get_data(0)
compare_value(self, output_value, model_value)
def test_port_switch(self):
system = PortSwitch(vector_size=2)
a = system.DeclareInputPort(name='a')
system.DeclareInputPort(name='b')
context = system.CreateDefaultContext()
self.assertIsInstance(a, InputPort)
system.get_port_selector_input_port().FixValue(context, a.get_index())
def test_first_order_low_pass_filter(self):
filter1 = FirstOrderLowPassFilter(time_constant=3.0, size=4)
self.assertEqual(filter1.get_time_constant(), 3.0)
alpha = np.array([1, 2, 3])
filter2 = FirstOrderLowPassFilter(time_constants=alpha)
np.testing.assert_array_equal(filter2.get_time_constants_vector(),
alpha)
context = filter2.CreateDefaultContext()
filter2.set_initial_output_value(context, [0.0, -0.2, 0.4])
<|reserved_special_token_0|>
def test_saturation(self):
system = Saturation((0.0, -1.0, 3.0), (1.0, 2.0, 4.0))
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
system.get_input_port(0).FixValue(context, input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(0).
CopyToVector(), expected))
mytest((-5.0, 5.0, 4.0), (0.0, 2.0, 4.0))
mytest((0.4, 0.0, 3.5), (0.4, 0.0, 3.5))
def test_trajectory_source(self):
ppt = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[2.0, 3.0], [
2.0, 1.0]])
system = TrajectorySource(trajectory=ppt, output_derivative_order=0,
zero_derivatives_beyond_limits=True)
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
context.SetTime(input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(0).
CopyToVector(), expected))
mytest(0.0, (2.0, 2.0))
mytest(0.5, (2.5, 1.5))
mytest(1.0, (3.0, 1.0))
ppt2 = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[4.0, 6.0],
[4.0, 2.0]])
system.UpdateTrajectory(trajectory=ppt2)
mytest(0.0, (4.0, 4.0))
mytest(0.5, (5.0, 3.0))
mytest(1.0, (6.0, 2.0))
def test_symbolic_vector_system(self):
t = Variable('t')
x = [Variable('x0'), Variable('x1')]
u = [Variable('u0'), Variable('u1')]
system = SymbolicVectorSystem(time=t, state=x, input=u, dynamics=[x
[0] + x[1], t], output=[u[1]], time_period=0.0)
context = system.CreateDefaultContext()
self.assertEqual(context.num_continuous_states(), 2)
self.assertEqual(context.num_discrete_state_groups(), 0)
self.assertEqual(system.get_input_port(0).size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertEqual(context.num_abstract_parameters(), 0)
self.assertEqual(context.num_numeric_parameter_groups(), 0)
self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(x[0] + x[1])
)
self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))
def test_symbolic_vector_system_parameters(self):
t = Variable('t')
x = [Variable('x0'), Variable('x1')]
u = [Variable('u0'), Variable('u1')]
p = [Variable('p0'), Variable('p1')]
system = SymbolicVectorSystem(time=t, state=x, input=u, parameter=p,
dynamics=[p[0] * x[0] + x[1] + p[1], t], output=[u[1]],
time_period=0.0)
context = system.CreateDefaultContext()
self.assertEqual(context.num_continuous_states(), 2)
self.assertEqual(context.num_discrete_state_groups(), 0)
self.assertEqual(system.get_input_port(0).size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertEqual(context.num_abstract_parameters(), 0)
self.assertEqual(context.num_numeric_parameter_groups(), 1)
self.assertEqual(context.get_numeric_parameter(0).size(), 2)
self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(p[0] * x
[0] + x[1] + p[1]))
self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))
def test_wrap_to_system(self):
system = WrapToSystem(2)
system.set_interval(1, 1.0, 2.0)
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
system.get_input_port(0).FixValue(context, input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(0).
CopyToVector(), expected))
mytest((-1.5, 0.5), (-1.5, 1.5))
mytest((0.2, 0.3), (0.2, 1.3))
def test_demultiplexer(self):
demux = Demultiplexer(size=4)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), 4)
numpy_compare.assert_equal(demux.get_output_ports_sizes(), [1, 1, 1, 1]
)
input_vec = np.array([1.0, 2.0, 3.0, 4.0])
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
for i in range(4):
self.assertTrue(np.allclose(output.get_vector_data(i).get_value
(), input_vec[i]))
demux = Demultiplexer(size=4, output_ports_size=2)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), 2)
numpy_compare.assert_equal(demux.get_output_ports_sizes(), [2, 2])
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
for i in range(2):
self.assertTrue(np.allclose(output.get_vector_data(i).get_value
(), input_vec[2 * i:2 * i + 2]))
output_ports_sizes = np.array([1, 2, 1])
num_output_ports = output_ports_sizes.size
input_vec = np.array([1.0, 2.0, 3.0, 4.0])
demux = Demultiplexer(output_ports_sizes=output_ports_sizes)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), num_output_ports)
numpy_compare.assert_equal(demux.get_output_ports_sizes(),
output_ports_sizes)
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
output_port_start = 0
for i in range(num_output_ports):
output_port_size = output.get_vector_data(i).size()
self.assertTrue(np.allclose(output.get_vector_data(i).get_value
(), input_vec[output_port_start:output_port_start +
output_port_size]))
output_port_start += output_port_size
<|reserved_special_token_0|>
def test_multilayer_perceptron(self):
mlp = MultilayerPerceptron(layers=[1, 2, 3], activation_type=
PerceptronActivationType.kReLU)
self.assertEqual(mlp.get_input_port().size(), 1)
self.assertEqual(mlp.get_output_port().size(), 3)
context = mlp.CreateDefaultContext()
params = np.zeros((mlp.num_parameters(), 1))
self.assertEqual(mlp.num_parameters(), 13)
self.assertEqual(mlp.layers(), [1, 2, 3])
self.assertEqual(mlp.activation_type(layer=0),
PerceptronActivationType.kReLU)
self.assertEqual(len(mlp.GetParameters(context=context)), mlp.
num_parameters())
mlp.SetWeights(context=context, layer=0, W=np.array([[1], [2]]))
mlp.SetBiases(context=context, layer=0, b=[3, 4])
np.testing.assert_array_equal(mlp.GetWeights(context=context, layer
=0), np.array([[1], [2]]))
np.testing.assert_array_equal(mlp.GetBiases(context=context, layer=
0), np.array([3, 4]))
params = np.zeros(mlp.num_parameters())
mlp.SetWeights(params=params, layer=0, W=np.array([[1], [2]]))
mlp.SetBiases(params=params, layer=0, b=[3, 4])
np.testing.assert_array_equal(mlp.GetWeights(params=params, layer=0
), np.array([[1], [2]]))
np.testing.assert_array_equal(mlp.GetBiases(params=params, layer=0),
np.array([3, 4]))
mutable_params = mlp.GetMutableParameters(context=context)
mutable_params[:] = 3.0
np.testing.assert_array_equal(mlp.GetParameters(context), np.full(
mlp.num_parameters(), 3.0))
global called_loss
called_loss = False
def silly_loss(Y, dloss_dY):
global called_loss
called_loss = True
dloss_dY[:] = 1
return Y.sum()
dloss_dparams = np.zeros((13,))
generator = RandomGenerator(23)
mlp.SetRandomContext(context, generator)
mlp.Backpropagation(context=context, X=np.array([1, 3, 4]).reshape(
(1, 3)), loss=silly_loss, dloss_dparams=dloss_dparams)
self.assertTrue(called_loss)
self.assertTrue(dloss_dparams.any())
dloss_dparams = np.zeros((13,))
mlp.BackpropagationMeanSquaredError(context=context, X=np.array([1,
3, 4]).reshape((1, 3)), Y_desired=np.eye(3), dloss_dparams=
dloss_dparams)
self.assertTrue(dloss_dparams.any())
Y = np.asfortranarray(np.eye(3))
mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]), Y=Y)
self.assertFalse(np.allclose(Y, np.eye(3)))
Y2 = mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]))
np.testing.assert_array_equal(Y, Y2)
mlp2 = MultilayerPerceptron(layers=[3, 2, 1], activation_types=[
PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])
self.assertEqual(mlp2.activation_type(0), PerceptronActivationType.
kReLU)
self.assertEqual(mlp2.activation_type(1), PerceptronActivationType.
kTanh)
Y = np.asfortranarray(np.full((1, 3), 2.4))
dYdX = np.asfortranarray(np.full((3, 3), 5.3))
context2 = mlp2.CreateDefaultContext()
mlp2.BatchOutput(context=context2, X=np.eye(3), Y=Y, dYdX=dYdX)
np.testing.assert_array_almost_equal(Y, np.zeros((1, 3)))
np.testing.assert_array_almost_equal(dYdX, np.zeros((3, 3)))
mlp = MultilayerPerceptron(use_sin_cos_for_input=[True, False],
remaining_layers=[3, 2], activation_types=[
PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])
self.assertEqual(mlp.get_input_port().size(), 2)
np.testing.assert_array_equal(mlp.layers(), [3, 3, 2])
def test_random_source(self):
source = RandomSource(distribution=RandomDistribution.kUniform,
num_outputs=2, sampling_interval_sec=0.01)
self.assertEqual(source.get_output_port(0).size(), 2)
builder = DiagramBuilder()
AddRandomInputs(sampling_interval_sec=0.01, builder=builder)
builder_ad = DiagramBuilder_[AutoDiffXd]()
AddRandomInputs(sampling_interval_sec=0.01, builder=builder_ad)
def test_constant_vector_source(self):
source = ConstantVectorSource(source_value=[1.0, 2.0])
context = source.CreateDefaultContext()
source.get_source_value(context)
source.get_mutable_source_value(context)
<|reserved_special_token_0|>
def test_shared_pointer_system_ctor(self):
dut = SharedPointerSystem(value_to_hold=[1, 2, 3])
readback = dut.get()
self.assertListEqual(readback, [1, 2, 3])
del dut
self.assertListEqual(readback, [1, 2, 3])
<|reserved_special_token_0|>
def test_sine(self):
sine_source = Sine(amplitude=1, frequency=2, phase=3, size=1,
is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 1)
self.assertEqual(sine_source.get_output_port(1).size(), 1)
self.assertEqual(sine_source.get_output_port(2).size(), 1)
sine_source = Sine(amplitude=1, frequency=2, phase=3, size=3,
is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 3)
self.assertEqual(sine_source.get_output_port(1).size(), 3)
self.assertEqual(sine_source.get_output_port(2).size(), 3)
sine_source = Sine(amplitudes=np.ones(2), frequencies=np.ones(2),
phases=np.ones(2), is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 2)
self.assertEqual(sine_source.get_output_port(1).size(), 2)
self.assertEqual(sine_source.get_output_port(2).size(), 2)
def test_discrete_derivative(self):
discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=0.5)
self.assertEqual(discrete_derivative.get_input_port(0).size(), 5)
self.assertEqual(discrete_derivative.get_output_port(0).size(), 5)
self.assertEqual(discrete_derivative.time_step(), 0.5)
self.assertTrue(discrete_derivative.suppress_initial_transient())
discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=
0.5, suppress_initial_transient=False)
self.assertFalse(discrete_derivative.suppress_initial_transient())
<|reserved_special_token_0|>
@numpy_compare.check_nonsymbolic_types
def test_log_vector_output(self, T):
builder = DiagramBuilder_[T]()
kSize = 1
integrator = builder.AddSystem(Integrator_[T](kSize))
port = integrator.get_output_port(0)
loggers = []
loggers.append(LogVectorOutput(port, builder))
loggers.append(LogVectorOutput(src=port, builder=builder))
loggers.append(LogVectorOutput(port, builder, 0.125))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_period=0.125))
loggers.append(LogVectorOutput(port, builder, {TriggerType.kForced}))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_triggers={TriggerType.kForced}))
loggers.append(LogVectorOutput(port, builder, {TriggerType.
kPeriodic}, 0.125))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_triggers={TriggerType.kPeriodic}, publish_period=0.125))
diagram = builder.Build()
context = diagram.CreateDefaultContext()
self.assertTrue(all(logger.FindLog(context).num_samples() == 0 for
logger in loggers))
<|reserved_special_token_0|>
@numpy_compare.check_nonsymbolic_types
def test_vector_log_sink(self, T):
builder = DiagramBuilder_[T]()
kSize = 1
constructors = [VectorLogSink_[T]]
loggers = []
if T == float:
constructors.append(VectorLogSink)
for constructor in constructors:
loggers.append(builder.AddSystem(constructor(kSize)))
loggers.append(builder.AddSystem(constructor(input_size=kSize)))
loggers.append(builder.AddSystem(constructor(kSize, 0.125)))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_period=0.125)))
loggers.append(builder.AddSystem(constructor(kSize, {
TriggerType.kForced})))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_triggers={TriggerType.kForced})))
loggers.append(builder.AddSystem(constructor(kSize, {
TriggerType.kPeriodic}, 0.125)))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_triggers={TriggerType.kPeriodic}, publish_period=
0.125)))
diagram = builder.Build()
context = diagram.CreateDefaultContext()
self.assertTrue(all(logger.FindLog(context) == logger.
FindMutableLog(context) for logger in loggers))
loggers_and_contexts = [(x, x.GetMyContextFromRoot(context)) for x in
loggers]
self.assertTrue(all(logger.GetLog(logger_context) == logger.
GetMutableLog(logger_context) for logger, logger_context in
loggers_and_contexts))
self.assertTrue(all(logger.GetLog(logger_context) == logger.FindLog
(context) for logger, logger_context in loggers_and_contexts))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestGeneral(unittest.TestCase):
def _check_instantiations(self, template, supports_symbolic=True):
default_cls = template[None]
self.assertTrue(template[float] is default_cls)
self.assertTrue(template[AutoDiffXd] is not default_cls)
if supports_symbolic:
self.assertTrue(template[Expression] is not default_cls)
def test_instantiations(self):
self._check_instantiations(Adder_)
self._check_instantiations(AffineSystem_)
self._check_instantiations(ConstantValueSource_)
self._check_instantiations(ConstantVectorSource_)
self._check_instantiations(Demultiplexer_)
self._check_instantiations(DiscreteDerivative_)
self._check_instantiations(DiscreteTimeDelay_)
self._check_instantiations(Gain_)
self._check_instantiations(Integrator_)
self._check_instantiations(LinearSystem_)
self._check_instantiations(LinearTransformDensity_,
supports_symbolic=False)
self._check_instantiations(Multiplexer_)
self._check_instantiations(MultilayerPerceptron_)
self._check_instantiations(PassThrough_)
self._check_instantiations(PortSwitch_)
self._check_instantiations(Saturation_)
self._check_instantiations(SharedPointerSystem_)
self._check_instantiations(Sine_)
self._check_instantiations(StateInterpolatorWithDiscreteDerivative_)
self._check_instantiations(SymbolicVectorSystem_)
self._check_instantiations(TrajectoryAffineSystem_,
supports_symbolic=False)
self._check_instantiations(TrajectoryLinearSystem_,
supports_symbolic=False)
self._check_instantiations(TrajectorySource_)
self._check_instantiations(VectorLogSink_)
self._check_instantiations(WrapToSystem_)
self._check_instantiations(ZeroOrderHold_)
<|reserved_special_token_0|>
def test_linear_affine_system_empty_matrices(self):
def CheckSizes(system, num_states, num_inputs, num_outputs):
self.assertEqual(system.num_continuous_states(), num_states)
self.assertEqual(system.num_inputs(), num_inputs)
self.assertEqual(system.num_outputs(), num_outputs)
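        # Omitted matrices default to empty; each system infers its state,
        # input, and output sizes from whichever arguments are supplied.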
system = AffineSystem(y0=[2, 1])
CheckSizes(system, num_states=0, num_inputs=0, num_outputs=2)
system = AffineSystem(D=np.eye(2))
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = LinearSystem(D=np.eye(2))
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = AffineSystem(D=np.eye(2), y0=[1, 2])
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = LinearSystem(B=np.eye(2))
CheckSizes(system, num_states=2, num_inputs=2, num_outputs=0)
def test_linear_system_zero_size(self):
num_x = 0
num_y = 2
num_u = 2
A = np.zeros((num_x, num_x))
B = np.zeros((num_x, num_u))
C = np.zeros((num_y, num_x))
D = np.zeros((num_y, num_u))
self.assertIsNotNone(LinearSystem(A, B, C, D))
@numpy_compare.check_nonsymbolic_types
def test_linear_transform_density(self, T):
dut = LinearTransformDensity_[T](distribution=RandomDistribution.
kGaussian, input_size=3, output_size=3)
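        # Reading based on the ports exercised below (an assumption, not
        # asserted by this test): the system maps w_out = A @ w_in + b and
        # reports the density of w_out induced by the input distribution.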
w_in = np.array([T(0.5), T(0.1), T(1.5)])
context = dut.CreateDefaultContext()
dut.get_input_port_w_in().FixValue(context, w_in)
self.assertEqual(dut.get_input_port_A().size(), 9)
self.assertEqual(dut.get_input_port_b().size(), 3)
self.assertEqual(dut.get_distribution(), RandomDistribution.kGaussian)
A = np.array([[T(0.5), T(1), T(2)], [T(1), T(2), T(3)], [T(3), T(4),
T(5)]])
dut.FixConstantA(context=context, A=A)
b = np.array([T(1), T(2), T(3)])
dut.FixConstantB(context=context, b=b)
dut.CalcDensity(context=context)
self.assertEqual(dut.get_output_port_w_out().size(), 3)
self.assertEqual(dut.get_output_port_w_out_density().size(), 1)
def test_vector_pass_through(self):
model_value = BasicVector([1.0, 2, 3])
system = PassThrough(vector_size=model_value.size())
context = system.CreateDefaultContext()
system.get_input_port(0).FixValue(context, model_value)
output = system.AllocateOutput()
input_eval = system.EvalVectorInput(context, 0)
compare_value(self, input_eval, model_value)
system.CalcOutput(context, output)
output_value = output.get_vector_data(0)
compare_value(self, output_value, model_value)
def test_default_vector_pass_through(self):
model_value = [1.0, 2, 3]
system = PassThrough(value=model_value)
context = system.CreateDefaultContext()
        np.testing.assert_array_equal(
            model_value, system.get_output_port().Eval(context))
def test_abstract_pass_through(self):
model_value = Value('Hello world')
system = PassThrough(abstract_model_value=model_value)
context = system.CreateDefaultContext()
system.get_input_port(0).FixValue(context, model_value)
output = system.AllocateOutput()
input_eval = system.EvalAbstractInput(context, 0)
compare_value(self, input_eval, model_value)
system.CalcOutput(context, output)
output_value = output.get_data(0)
compare_value(self, output_value, model_value)
def test_port_switch(self):
system = PortSwitch(vector_size=2)
a = system.DeclareInputPort(name='a')
system.DeclareInputPort(name='b')
context = system.CreateDefaultContext()
self.assertIsInstance(a, InputPort)
system.get_port_selector_input_port().FixValue(context, a.get_index())
def test_first_order_low_pass_filter(self):
filter1 = FirstOrderLowPassFilter(time_constant=3.0, size=4)
self.assertEqual(filter1.get_time_constant(), 3.0)
alpha = np.array([1, 2, 3])
filter2 = FirstOrderLowPassFilter(time_constants=alpha)
np.testing.assert_array_equal(filter2.get_time_constants_vector(),
alpha)
context = filter2.CreateDefaultContext()
filter2.set_initial_output_value(context, [0.0, -0.2, 0.4])
<|reserved_special_token_0|>
def test_saturation(self):
system = Saturation((0.0, -1.0, 3.0), (1.0, 2.0, 4.0))
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
system.get_input_port(0).FixValue(context, input)
system.CalcOutput(context, output)
            self.assertTrue(np.allclose(
                output.get_vector_data(0).CopyToVector(), expected))
mytest((-5.0, 5.0, 4.0), (0.0, 2.0, 4.0))
mytest((0.4, 0.0, 3.5), (0.4, 0.0, 3.5))
def test_trajectory_source(self):
        ppt = PiecewisePolynomial.FirstOrderHold(
            [0.0, 1.0], [[2.0, 3.0], [2.0, 1.0]])
system = TrajectorySource(trajectory=ppt, output_derivative_order=0,
zero_derivatives_beyond_limits=True)
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
context.SetTime(input)
system.CalcOutput(context, output)
            self.assertTrue(np.allclose(
                output.get_vector_data(0).CopyToVector(), expected))
mytest(0.0, (2.0, 2.0))
mytest(0.5, (2.5, 1.5))
mytest(1.0, (3.0, 1.0))
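        # Swapping in a new trajectory at runtime should be reflected
        # immediately in the output.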
        ppt2 = PiecewisePolynomial.FirstOrderHold(
            [0.0, 1.0], [[4.0, 6.0], [4.0, 2.0]])
system.UpdateTrajectory(trajectory=ppt2)
mytest(0.0, (4.0, 4.0))
mytest(0.5, (5.0, 3.0))
mytest(1.0, (6.0, 2.0))
def test_symbolic_vector_system(self):
t = Variable('t')
x = [Variable('x0'), Variable('x1')]
u = [Variable('u0'), Variable('u1')]
system = SymbolicVectorSystem(time=t, state=x, input=u, dynamics=[x
[0] + x[1], t], output=[u[1]], time_period=0.0)
context = system.CreateDefaultContext()
self.assertEqual(context.num_continuous_states(), 2)
self.assertEqual(context.num_discrete_state_groups(), 0)
self.assertEqual(system.get_input_port(0).size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertEqual(context.num_abstract_parameters(), 0)
self.assertEqual(context.num_numeric_parameter_groups(), 0)
        self.assertTrue(
            system.dynamics_for_variable(x[0]).EqualTo(x[0] + x[1]))
self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))
def test_symbolic_vector_system_parameters(self):
t = Variable('t')
x = [Variable('x0'), Variable('x1')]
u = [Variable('u0'), Variable('u1')]
p = [Variable('p0'), Variable('p1')]
system = SymbolicVectorSystem(time=t, state=x, input=u, parameter=p,
dynamics=[p[0] * x[0] + x[1] + p[1], t], output=[u[1]],
time_period=0.0)
context = system.CreateDefaultContext()
self.assertEqual(context.num_continuous_states(), 2)
self.assertEqual(context.num_discrete_state_groups(), 0)
self.assertEqual(system.get_input_port(0).size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertEqual(context.num_abstract_parameters(), 0)
self.assertEqual(context.num_numeric_parameter_groups(), 1)
self.assertEqual(context.get_numeric_parameter(0).size(), 2)
        self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(
            p[0] * x[0] + x[1] + p[1]))
self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))
def test_wrap_to_system(self):
system = WrapToSystem(2)
system.set_interval(1, 1.0, 2.0)
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
system.get_input_port(0).FixValue(context, input)
system.CalcOutput(context, output)
            self.assertTrue(np.allclose(
                output.get_vector_data(0).CopyToVector(), expected))
mytest((-1.5, 0.5), (-1.5, 1.5))
mytest((0.2, 0.3), (0.2, 1.3))
def test_demultiplexer(self):
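        # Three configurations are covered: one scalar port per element,
        # equal-sized output ports, and explicitly sized output ports. In
        # every case the outputs must tile the input vector in order.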
demux = Demultiplexer(size=4)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), 4)
        numpy_compare.assert_equal(
            demux.get_output_ports_sizes(), [1, 1, 1, 1])
input_vec = np.array([1.0, 2.0, 3.0, 4.0])
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
for i in range(4):
            self.assertTrue(np.allclose(
                output.get_vector_data(i).get_value(), input_vec[i]))
demux = Demultiplexer(size=4, output_ports_size=2)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), 2)
numpy_compare.assert_equal(demux.get_output_ports_sizes(), [2, 2])
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
for i in range(2):
            self.assertTrue(np.allclose(
                output.get_vector_data(i).get_value(),
                input_vec[2 * i:2 * i + 2]))
output_ports_sizes = np.array([1, 2, 1])
num_output_ports = output_ports_sizes.size
input_vec = np.array([1.0, 2.0, 3.0, 4.0])
demux = Demultiplexer(output_ports_sizes=output_ports_sizes)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), num_output_ports)
numpy_compare.assert_equal(demux.get_output_ports_sizes(),
output_ports_sizes)
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
output_port_start = 0
for i in range(num_output_ports):
output_port_size = output.get_vector_data(i).size()
            expected = input_vec[
                output_port_start:output_port_start + output_port_size]
            self.assertTrue(np.allclose(
                output.get_vector_data(i).get_value(), expected))
output_port_start += output_port_size
<|reserved_special_token_0|>
def test_multilayer_perceptron(self):
mlp = MultilayerPerceptron(layers=[1, 2, 3], activation_type=
PerceptronActivationType.kReLU)
self.assertEqual(mlp.get_input_port().size(), 1)
self.assertEqual(mlp.get_output_port().size(), 3)
context = mlp.CreateDefaultContext()
params = np.zeros((mlp.num_parameters(), 1))
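        # For layers [1, 2, 3]: (1*2 + 2) + (2*3 + 3) = 13 weights + biases.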
self.assertEqual(mlp.num_parameters(), 13)
self.assertEqual(mlp.layers(), [1, 2, 3])
self.assertEqual(mlp.activation_type(layer=0),
PerceptronActivationType.kReLU)
        self.assertEqual(
            len(mlp.GetParameters(context=context)), mlp.num_parameters())
mlp.SetWeights(context=context, layer=0, W=np.array([[1], [2]]))
mlp.SetBiases(context=context, layer=0, b=[3, 4])
        np.testing.assert_array_equal(
            mlp.GetWeights(context=context, layer=0), np.array([[1], [2]]))
        np.testing.assert_array_equal(
            mlp.GetBiases(context=context, layer=0), np.array([3, 4]))
params = np.zeros(mlp.num_parameters())
mlp.SetWeights(params=params, layer=0, W=np.array([[1], [2]]))
mlp.SetBiases(params=params, layer=0, b=[3, 4])
        np.testing.assert_array_equal(
            mlp.GetWeights(params=params, layer=0), np.array([[1], [2]]))
        np.testing.assert_array_equal(
            mlp.GetBiases(params=params, layer=0), np.array([3, 4]))
mutable_params = mlp.GetMutableParameters(context=context)
mutable_params[:] = 3.0
        np.testing.assert_array_equal(
            mlp.GetParameters(context), np.full(mlp.num_parameters(), 3.0))
global called_loss
called_loss = False
def silly_loss(Y, dloss_dY):
global called_loss
called_loss = True
dloss_dY[:] = 1
return Y.sum()
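        # Backpropagation() invokes loss(Y, dloss_dY) and accumulates the
        # gradient with respect to the network parameters into dloss_dparams.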
dloss_dparams = np.zeros((13,))
generator = RandomGenerator(23)
mlp.SetRandomContext(context, generator)
mlp.Backpropagation(context=context, X=np.array([1, 3, 4]).reshape(
(1, 3)), loss=silly_loss, dloss_dparams=dloss_dparams)
self.assertTrue(called_loss)
self.assertTrue(dloss_dparams.any())
dloss_dparams = np.zeros((13,))
        mlp.BackpropagationMeanSquaredError(
            context=context, X=np.array([1, 3, 4]).reshape((1, 3)),
            Y_desired=np.eye(3), dloss_dparams=dloss_dparams)
self.assertTrue(dloss_dparams.any())
Y = np.asfortranarray(np.eye(3))
mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]), Y=Y)
self.assertFalse(np.allclose(Y, np.eye(3)))
Y2 = mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]))
np.testing.assert_array_equal(Y, Y2)
mlp2 = MultilayerPerceptron(layers=[3, 2, 1], activation_types=[
PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])
        self.assertEqual(
            mlp2.activation_type(0), PerceptronActivationType.kReLU)
        self.assertEqual(
            mlp2.activation_type(1), PerceptronActivationType.kTanh)
Y = np.asfortranarray(np.full((1, 3), 2.4))
dYdX = np.asfortranarray(np.full((3, 3), 5.3))
context2 = mlp2.CreateDefaultContext()
mlp2.BatchOutput(context=context2, X=np.eye(3), Y=Y, dYdX=dYdX)
np.testing.assert_array_almost_equal(Y, np.zeros((1, 3)))
np.testing.assert_array_almost_equal(dYdX, np.zeros((3, 3)))
mlp = MultilayerPerceptron(use_sin_cos_for_input=[True, False],
remaining_layers=[3, 2], activation_types=[
PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])
self.assertEqual(mlp.get_input_port().size(), 2)
np.testing.assert_array_equal(mlp.layers(), [3, 3, 2])
def test_random_source(self):
source = RandomSource(distribution=RandomDistribution.kUniform,
num_outputs=2, sampling_interval_sec=0.01)
self.assertEqual(source.get_output_port(0).size(), 2)
builder = DiagramBuilder()
AddRandomInputs(sampling_interval_sec=0.01, builder=builder)
builder_ad = DiagramBuilder_[AutoDiffXd]()
AddRandomInputs(sampling_interval_sec=0.01, builder=builder_ad)
def test_constant_vector_source(self):
source = ConstantVectorSource(source_value=[1.0, 2.0])
context = source.CreateDefaultContext()
source.get_source_value(context)
source.get_mutable_source_value(context)
<|reserved_special_token_0|>
def test_shared_pointer_system_ctor(self):
dut = SharedPointerSystem(value_to_hold=[1, 2, 3])
readback = dut.get()
self.assertListEqual(readback, [1, 2, 3])
del dut
self.assertListEqual(readback, [1, 2, 3])
def test_shared_pointer_system_builder(self):
builder = DiagramBuilder()
        self.assertListEqual(
            SharedPointerSystem.AddToBuilder(
                builder=builder, value_to_hold=[1, 2, 3]),
            [1, 2, 3])
diagram = builder.Build()
del builder
readback = diagram.GetSystems()[0].get()
self.assertListEqual(readback, [1, 2, 3])
del diagram
self.assertListEqual(readback, [1, 2, 3])
def test_sine(self):
sine_source = Sine(amplitude=1, frequency=2, phase=3, size=1,
is_time_based=True)
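        # Sine exposes three equally sized output ports; per the Drake docs
        # they are the sinusoid and its first and second time derivatives.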
self.assertEqual(sine_source.get_output_port(0).size(), 1)
self.assertEqual(sine_source.get_output_port(1).size(), 1)
self.assertEqual(sine_source.get_output_port(2).size(), 1)
sine_source = Sine(amplitude=1, frequency=2, phase=3, size=3,
is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 3)
self.assertEqual(sine_source.get_output_port(1).size(), 3)
self.assertEqual(sine_source.get_output_port(2).size(), 3)
sine_source = Sine(amplitudes=np.ones(2), frequencies=np.ones(2),
phases=np.ones(2), is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 2)
self.assertEqual(sine_source.get_output_port(1).size(), 2)
self.assertEqual(sine_source.get_output_port(2).size(), 2)
def test_discrete_derivative(self):
discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=0.5)
self.assertEqual(discrete_derivative.get_input_port(0).size(), 5)
self.assertEqual(discrete_derivative.get_output_port(0).size(), 5)
self.assertEqual(discrete_derivative.time_step(), 0.5)
self.assertTrue(discrete_derivative.suppress_initial_transient())
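        # With suppress_initial_transient=True (the default), the output is
        # held at zero until enough samples exist to form a difference.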
        discrete_derivative = DiscreteDerivative(
            num_inputs=5, time_step=0.5, suppress_initial_transient=False)
self.assertFalse(discrete_derivative.suppress_initial_transient())
<|reserved_special_token_0|>
@numpy_compare.check_nonsymbolic_types
def test_log_vector_output(self, T):
builder = DiagramBuilder_[T]()
kSize = 1
integrator = builder.AddSystem(Integrator_[T](kSize))
port = integrator.get_output_port(0)
loggers = []
loggers.append(LogVectorOutput(port, builder))
loggers.append(LogVectorOutput(src=port, builder=builder))
loggers.append(LogVectorOutput(port, builder, 0.125))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_period=0.125))
loggers.append(LogVectorOutput(port, builder, {TriggerType.kForced}))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_triggers={TriggerType.kForced}))
        loggers.append(
            LogVectorOutput(port, builder, {TriggerType.kPeriodic}, 0.125))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_triggers={TriggerType.kPeriodic}, publish_period=0.125))
diagram = builder.Build()
context = diagram.CreateDefaultContext()
self.assertTrue(all(logger.FindLog(context).num_samples() == 0 for
logger in loggers))
<|reserved_special_token_0|>
@numpy_compare.check_nonsymbolic_types
def test_vector_log_sink(self, T):
builder = DiagramBuilder_[T]()
kSize = 1
constructors = [VectorLogSink_[T]]
loggers = []
if T == float:
constructors.append(VectorLogSink)
for constructor in constructors:
loggers.append(builder.AddSystem(constructor(kSize)))
loggers.append(builder.AddSystem(constructor(input_size=kSize)))
loggers.append(builder.AddSystem(constructor(kSize, 0.125)))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_period=0.125)))
loggers.append(builder.AddSystem(constructor(kSize, {
TriggerType.kForced})))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_triggers={TriggerType.kForced})))
loggers.append(builder.AddSystem(constructor(kSize, {
TriggerType.kPeriodic}, 0.125)))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_triggers={TriggerType.kPeriodic}, publish_period=
0.125)))
diagram = builder.Build()
context = diagram.CreateDefaultContext()
        self.assertTrue(all(
            logger.FindLog(context) == logger.FindMutableLog(context)
            for logger in loggers))
        loggers_and_contexts = [
            (x, x.GetMyContextFromRoot(context)) for x in loggers]
        self.assertTrue(all(
            logger.GetLog(logger_context)
            == logger.GetMutableLog(logger_context)
            for logger, logger_context in loggers_and_contexts))
        self.assertTrue(all(
            logger.GetLog(logger_context) == logger.FindLog(context)
            for logger, logger_context in loggers_and_contexts))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestGeneral(unittest.TestCase):
def _check_instantiations(self, template, supports_symbolic=True):
default_cls = template[None]
self.assertTrue(template[float] is default_cls)
self.assertTrue(template[AutoDiffXd] is not default_cls)
if supports_symbolic:
self.assertTrue(template[Expression] is not default_cls)
def test_instantiations(self):
self._check_instantiations(Adder_)
self._check_instantiations(AffineSystem_)
self._check_instantiations(ConstantValueSource_)
self._check_instantiations(ConstantVectorSource_)
self._check_instantiations(Demultiplexer_)
self._check_instantiations(DiscreteDerivative_)
self._check_instantiations(DiscreteTimeDelay_)
self._check_instantiations(Gain_)
self._check_instantiations(Integrator_)
self._check_instantiations(LinearSystem_)
self._check_instantiations(LinearTransformDensity_,
supports_symbolic=False)
self._check_instantiations(Multiplexer_)
self._check_instantiations(MultilayerPerceptron_)
self._check_instantiations(PassThrough_)
self._check_instantiations(PortSwitch_)
self._check_instantiations(Saturation_)
self._check_instantiations(SharedPointerSystem_)
self._check_instantiations(Sine_)
self._check_instantiations(StateInterpolatorWithDiscreteDerivative_)
self._check_instantiations(SymbolicVectorSystem_)
self._check_instantiations(TrajectoryAffineSystem_,
supports_symbolic=False)
self._check_instantiations(TrajectoryLinearSystem_,
supports_symbolic=False)
self._check_instantiations(TrajectorySource_)
self._check_instantiations(VectorLogSink_)
self._check_instantiations(WrapToSystem_)
self._check_instantiations(ZeroOrderHold_)
def test_linear_affine_system(self):
A = np.identity(2)
B = np.array([[0], [1]])
f0 = np.array([[0], [0]])
C = np.array([[0, 1]])
D = [1]
y0 = [0]
system = LinearSystem(A, B, C, D)
context = system.CreateDefaultContext()
self.assertEqual(system.get_input_port(0).size(), 1)
        self.assertEqual(
            context.get_mutable_continuous_state_vector().size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertTrue((system.A() == A).all())
self.assertTrue((system.B() == B).all())
self.assertTrue((system.f0() == f0).all())
self.assertTrue((system.C() == C).all())
self.assertEqual(system.D(), D)
self.assertEqual(system.y0(), y0)
self.assertEqual(system.time_period(), 0.0)
x0 = np.array([1, 2])
system.configure_default_state(x0=x0)
system.SetDefaultContext(context)
        np.testing.assert_equal(
            context.get_continuous_state_vector().CopyToVector(), x0)
        generator = RandomGenerator()
        system.SetRandomContext(context, generator)
        np.testing.assert_equal(
            context.get_continuous_state_vector().CopyToVector(), x0)
        system.configure_random_state(covariance=np.eye(2))
        system.SetRandomContext(context, generator)
        self.assertNotEqual(
            context.get_continuous_state_vector().CopyToVector()[1], x0[1])
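        # With A = I and B = [0, 1]^T, [B, A @ B] has rank 1, so the system
        # is neither controllable nor stabilizable; likewise C = [0, 1]
        # yields an unobservable, undetectable pair.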
Co = ControllabilityMatrix(system)
self.assertEqual(Co.shape, (2, 2))
self.assertFalse(IsControllable(system))
self.assertFalse(IsControllable(system, 1e-06))
self.assertFalse(IsStabilizable(sys=system))
self.assertFalse(IsStabilizable(sys=system, threshold=1e-06))
Ob = ObservabilityMatrix(system)
self.assertEqual(Ob.shape, (2, 2))
self.assertFalse(IsObservable(system))
self.assertFalse(IsDetectable(sys=system))
self.assertFalse(IsDetectable(sys=system, threshold=1e-06))
system = AffineSystem(A, B, f0, C, D, y0, 0.1)
self.assertEqual(system.get_input_port(0), system.get_input_port())
self.assertEqual(system.get_output_port(0), system.get_output_port())
context = system.CreateDefaultContext()
self.assertEqual(system.get_input_port(0).size(), 1)
self.assertEqual(context.get_discrete_state_vector().size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertTrue((system.A() == A).all())
self.assertTrue((system.B() == B).all())
self.assertTrue((system.f0() == f0).all())
self.assertTrue((system.C() == C).all())
self.assertEqual(system.D(), D)
self.assertEqual(system.y0(), y0)
self.assertEqual(system.time_period(), 0.1)
system.get_input_port(0).FixValue(context, 0)
linearized = Linearize(system, context)
self.assertTrue((linearized.A() == A).all())
taylor = FirstOrderTaylorApproximation(system, context)
self.assertTrue((taylor.y0() == y0).all())
new_A = np.array([[1, 2], [3, 4]])
new_B = np.array([[5], [6]])
new_f0 = np.array([[7], [8]])
new_C = np.array([[9, 10]])
new_D = np.array([[11]])
new_y0 = np.array([12])
        system.UpdateCoefficients(
            A=new_A, B=new_B, f0=new_f0, C=new_C, D=new_D, y0=new_y0)
np.testing.assert_equal(new_A, system.A())
np.testing.assert_equal(new_B, system.B())
np.testing.assert_equal(new_f0.flatten(), system.f0())
np.testing.assert_equal(new_C, system.C())
np.testing.assert_equal(new_D, system.D())
np.testing.assert_equal(new_y0, system.y0())
system = MatrixGain(D=A)
self.assertTrue((system.D() == A).all())
system = TrajectoryAffineSystem(PiecewisePolynomial(A),
PiecewisePolynomial(B), PiecewisePolynomial(f0),
PiecewisePolynomial(C), PiecewisePolynomial(D),
PiecewisePolynomial(y0), 0.1)
self.assertEqual(system.get_input_port(0), system.get_input_port())
self.assertEqual(system.get_output_port(0), system.get_output_port())
context = system.CreateDefaultContext()
self.assertEqual(system.get_input_port(0).size(), 1)
self.assertEqual(context.get_discrete_state_vector().size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
for t in np.linspace(0.0, 1.0, 5):
self.assertTrue((system.A(t) == A).all())
self.assertTrue((system.B(t) == B).all())
self.assertTrue((system.f0(t) == f0).all())
self.assertTrue((system.C(t) == C).all())
self.assertEqual(system.D(t), D)
self.assertEqual(system.y0(t), y0)
self.assertEqual(system.time_period(), 0.1)
x0 = np.array([1, 2])
system.configure_default_state(x0=x0)
system.SetDefaultContext(context)
        np.testing.assert_equal(
            context.get_discrete_state_vector().CopyToVector(), x0)
        generator = RandomGenerator()
        system.SetRandomContext(context, generator)
        np.testing.assert_equal(
            context.get_discrete_state_vector().CopyToVector(), x0)
        system.configure_random_state(covariance=np.eye(2))
        system.SetRandomContext(context, generator)
        self.assertNotEqual(
            context.get_discrete_state_vector().CopyToVector()[1], x0[1])
        system = TrajectoryLinearSystem(
            A=PiecewisePolynomial(A), B=PiecewisePolynomial(B),
            C=PiecewisePolynomial(C), D=PiecewisePolynomial(D),
            time_period=0.1)
self.assertEqual(system.time_period(), 0.1)
system.configure_default_state(x0=np.array([1, 2]))
system.configure_random_state(covariance=np.eye(2))
def test_linear_affine_system_empty_matrices(self):
def CheckSizes(system, num_states, num_inputs, num_outputs):
self.assertEqual(system.num_continuous_states(), num_states)
self.assertEqual(system.num_inputs(), num_inputs)
self.assertEqual(system.num_outputs(), num_outputs)
system = AffineSystem(y0=[2, 1])
CheckSizes(system, num_states=0, num_inputs=0, num_outputs=2)
system = AffineSystem(D=np.eye(2))
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = LinearSystem(D=np.eye(2))
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = AffineSystem(D=np.eye(2), y0=[1, 2])
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = LinearSystem(B=np.eye(2))
CheckSizes(system, num_states=2, num_inputs=2, num_outputs=0)
def test_linear_system_zero_size(self):
num_x = 0
num_y = 2
num_u = 2
A = np.zeros((num_x, num_x))
B = np.zeros((num_x, num_u))
C = np.zeros((num_y, num_x))
D = np.zeros((num_y, num_u))
self.assertIsNotNone(LinearSystem(A, B, C, D))
@numpy_compare.check_nonsymbolic_types
def test_linear_transform_density(self, T):
dut = LinearTransformDensity_[T](distribution=RandomDistribution.
kGaussian, input_size=3, output_size=3)
w_in = np.array([T(0.5), T(0.1), T(1.5)])
context = dut.CreateDefaultContext()
dut.get_input_port_w_in().FixValue(context, w_in)
self.assertEqual(dut.get_input_port_A().size(), 9)
self.assertEqual(dut.get_input_port_b().size(), 3)
self.assertEqual(dut.get_distribution(), RandomDistribution.kGaussian)
A = np.array([[T(0.5), T(1), T(2)], [T(1), T(2), T(3)], [T(3), T(4),
T(5)]])
dut.FixConstantA(context=context, A=A)
b = np.array([T(1), T(2), T(3)])
dut.FixConstantB(context=context, b=b)
dut.CalcDensity(context=context)
self.assertEqual(dut.get_output_port_w_out().size(), 3)
self.assertEqual(dut.get_output_port_w_out_density().size(), 1)
def test_vector_pass_through(self):
model_value = BasicVector([1.0, 2, 3])
system = PassThrough(vector_size=model_value.size())
context = system.CreateDefaultContext()
system.get_input_port(0).FixValue(context, model_value)
output = system.AllocateOutput()
input_eval = system.EvalVectorInput(context, 0)
compare_value(self, input_eval, model_value)
system.CalcOutput(context, output)
output_value = output.get_vector_data(0)
compare_value(self, output_value, model_value)
def test_default_vector_pass_through(self):
model_value = [1.0, 2, 3]
system = PassThrough(value=model_value)
context = system.CreateDefaultContext()
np.testing.assert_array_equal(model_value, system.get_output_port()
.Eval(context))
def test_abstract_pass_through(self):
model_value = Value('Hello world')
system = PassThrough(abstract_model_value=model_value)
context = system.CreateDefaultContext()
system.get_input_port(0).FixValue(context, model_value)
output = system.AllocateOutput()
input_eval = system.EvalAbstractInput(context, 0)
compare_value(self, input_eval, model_value)
system.CalcOutput(context, output)
output_value = output.get_data(0)
compare_value(self, output_value, model_value)
def test_port_switch(self):
system = PortSwitch(vector_size=2)
a = system.DeclareInputPort(name='a')
system.DeclareInputPort(name='b')
context = system.CreateDefaultContext()
self.assertIsInstance(a, InputPort)
system.get_port_selector_input_port().FixValue(context, a.get_index())
def test_first_order_low_pass_filter(self):
filter1 = FirstOrderLowPassFilter(time_constant=3.0, size=4)
self.assertEqual(filter1.get_time_constant(), 3.0)
alpha = np.array([1, 2, 3])
filter2 = FirstOrderLowPassFilter(time_constants=alpha)
np.testing.assert_array_equal(filter2.get_time_constants_vector(),
alpha)
context = filter2.CreateDefaultContext()
filter2.set_initial_output_value(context, [0.0, -0.2, 0.4])
<|reserved_special_token_0|>
def test_saturation(self):
system = Saturation((0.0, -1.0, 3.0), (1.0, 2.0, 4.0))
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
system.get_input_port(0).FixValue(context, input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(0).
CopyToVector(), expected))
mytest((-5.0, 5.0, 4.0), (0.0, 2.0, 4.0))
mytest((0.4, 0.0, 3.5), (0.4, 0.0, 3.5))
def test_trajectory_source(self):
ppt = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[2.0, 3.0], [
2.0, 1.0]])
system = TrajectorySource(trajectory=ppt, output_derivative_order=0,
zero_derivatives_beyond_limits=True)
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
context.SetTime(input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(0).
CopyToVector(), expected))
mytest(0.0, (2.0, 2.0))
mytest(0.5, (2.5, 1.5))
mytest(1.0, (3.0, 1.0))
ppt2 = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[4.0, 6.0],
[4.0, 2.0]])
system.UpdateTrajectory(trajectory=ppt2)
mytest(0.0, (4.0, 4.0))
mytest(0.5, (5.0, 3.0))
mytest(1.0, (6.0, 2.0))
def test_symbolic_vector_system(self):
t = Variable('t')
x = [Variable('x0'), Variable('x1')]
u = [Variable('u0'), Variable('u1')]
system = SymbolicVectorSystem(time=t, state=x, input=u, dynamics=[x
[0] + x[1], t], output=[u[1]], time_period=0.0)
context = system.CreateDefaultContext()
self.assertEqual(context.num_continuous_states(), 2)
self.assertEqual(context.num_discrete_state_groups(), 0)
self.assertEqual(system.get_input_port(0).size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertEqual(context.num_abstract_parameters(), 0)
self.assertEqual(context.num_numeric_parameter_groups(), 0)
self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(x[0] + x[1])
)
self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))
def test_symbolic_vector_system_parameters(self):
t = Variable('t')
x = [Variable('x0'), Variable('x1')]
u = [Variable('u0'), Variable('u1')]
p = [Variable('p0'), Variable('p1')]
system = SymbolicVectorSystem(time=t, state=x, input=u, parameter=p,
dynamics=[p[0] * x[0] + x[1] + p[1], t], output=[u[1]],
time_period=0.0)
context = system.CreateDefaultContext()
self.assertEqual(context.num_continuous_states(), 2)
self.assertEqual(context.num_discrete_state_groups(), 0)
self.assertEqual(system.get_input_port(0).size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertEqual(context.num_abstract_parameters(), 0)
self.assertEqual(context.num_numeric_parameter_groups(), 1)
self.assertEqual(context.get_numeric_parameter(0).size(), 2)
self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(p[0] * x
[0] + x[1] + p[1]))
self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))
def test_wrap_to_system(self):
system = WrapToSystem(2)
system.set_interval(1, 1.0, 2.0)
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
system.get_input_port(0).FixValue(context, input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(0).
CopyToVector(), expected))
mytest((-1.5, 0.5), (-1.5, 1.5))
mytest((0.2, 0.3), (0.2, 1.3))
def test_demultiplexer(self):
demux = Demultiplexer(size=4)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), 4)
numpy_compare.assert_equal(demux.get_output_ports_sizes(), [1, 1, 1, 1]
)
input_vec = np.array([1.0, 2.0, 3.0, 4.0])
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
for i in range(4):
self.assertTrue(np.allclose(output.get_vector_data(i).get_value
(), input_vec[i]))
demux = Demultiplexer(size=4, output_ports_size=2)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), 2)
numpy_compare.assert_equal(demux.get_output_ports_sizes(), [2, 2])
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
for i in range(2):
self.assertTrue(np.allclose(output.get_vector_data(i).get_value
(), input_vec[2 * i:2 * i + 2]))
output_ports_sizes = np.array([1, 2, 1])
num_output_ports = output_ports_sizes.size
input_vec = np.array([1.0, 2.0, 3.0, 4.0])
demux = Demultiplexer(output_ports_sizes=output_ports_sizes)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), num_output_ports)
numpy_compare.assert_equal(demux.get_output_ports_sizes(),
output_ports_sizes)
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
output_port_start = 0
for i in range(num_output_ports):
output_port_size = output.get_vector_data(i).size()
self.assertTrue(np.allclose(output.get_vector_data(i).get_value
(), input_vec[output_port_start:output_port_start +
output_port_size]))
output_port_start += output_port_size
<|reserved_special_token_0|>
def test_multilayer_perceptron(self):
mlp = MultilayerPerceptron(layers=[1, 2, 3], activation_type=
PerceptronActivationType.kReLU)
self.assertEqual(mlp.get_input_port().size(), 1)
self.assertEqual(mlp.get_output_port().size(), 3)
context = mlp.CreateDefaultContext()
params = np.zeros((mlp.num_parameters(), 1))
self.assertEqual(mlp.num_parameters(), 13)
self.assertEqual(mlp.layers(), [1, 2, 3])
self.assertEqual(mlp.activation_type(layer=0),
PerceptronActivationType.kReLU)
self.assertEqual(len(mlp.GetParameters(context=context)), mlp.
num_parameters())
mlp.SetWeights(context=context, layer=0, W=np.array([[1], [2]]))
mlp.SetBiases(context=context, layer=0, b=[3, 4])
np.testing.assert_array_equal(mlp.GetWeights(context=context, layer
=0), np.array([[1], [2]]))
np.testing.assert_array_equal(mlp.GetBiases(context=context, layer=
0), np.array([3, 4]))
params = np.zeros(mlp.num_parameters())
mlp.SetWeights(params=params, layer=0, W=np.array([[1], [2]]))
mlp.SetBiases(params=params, layer=0, b=[3, 4])
np.testing.assert_array_equal(mlp.GetWeights(params=params, layer=0
), np.array([[1], [2]]))
np.testing.assert_array_equal(mlp.GetBiases(params=params, layer=0),
np.array([3, 4]))
mutable_params = mlp.GetMutableParameters(context=context)
mutable_params[:] = 3.0
np.testing.assert_array_equal(mlp.GetParameters(context), np.full(
mlp.num_parameters(), 3.0))
global called_loss
called_loss = False
def silly_loss(Y, dloss_dY):
global called_loss
called_loss = True
dloss_dY[:] = 1
return Y.sum()
dloss_dparams = np.zeros((13,))
generator = RandomGenerator(23)
mlp.SetRandomContext(context, generator)
mlp.Backpropagation(context=context, X=np.array([1, 3, 4]).reshape(
(1, 3)), loss=silly_loss, dloss_dparams=dloss_dparams)
self.assertTrue(called_loss)
self.assertTrue(dloss_dparams.any())
dloss_dparams = np.zeros((13,))
mlp.BackpropagationMeanSquaredError(context=context, X=np.array([1,
3, 4]).reshape((1, 3)), Y_desired=np.eye(3), dloss_dparams=
dloss_dparams)
self.assertTrue(dloss_dparams.any())
Y = np.asfortranarray(np.eye(3))
mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]), Y=Y)
self.assertFalse(np.allclose(Y, np.eye(3)))
Y2 = mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]))
np.testing.assert_array_equal(Y, Y2)
mlp2 = MultilayerPerceptron(layers=[3, 2, 1], activation_types=[
PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])
self.assertEqual(mlp2.activation_type(0), PerceptronActivationType.
kReLU)
self.assertEqual(mlp2.activation_type(1), PerceptronActivationType.
kTanh)
Y = np.asfortranarray(np.full((1, 3), 2.4))
dYdX = np.asfortranarray(np.full((3, 3), 5.3))
context2 = mlp2.CreateDefaultContext()
mlp2.BatchOutput(context=context2, X=np.eye(3), Y=Y, dYdX=dYdX)
np.testing.assert_array_almost_equal(Y, np.zeros((1, 3)))
np.testing.assert_array_almost_equal(dYdX, np.zeros((3, 3)))
mlp = MultilayerPerceptron(use_sin_cos_for_input=[True, False],
remaining_layers=[3, 2], activation_types=[
PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])
self.assertEqual(mlp.get_input_port().size(), 2)
np.testing.assert_array_equal(mlp.layers(), [3, 3, 2])
def test_random_source(self):
source = RandomSource(distribution=RandomDistribution.kUniform,
num_outputs=2, sampling_interval_sec=0.01)
self.assertEqual(source.get_output_port(0).size(), 2)
builder = DiagramBuilder()
AddRandomInputs(sampling_interval_sec=0.01, builder=builder)
builder_ad = DiagramBuilder_[AutoDiffXd]()
AddRandomInputs(sampling_interval_sec=0.01, builder=builder_ad)
def test_constant_vector_source(self):
source = ConstantVectorSource(source_value=[1.0, 2.0])
context = source.CreateDefaultContext()
source.get_source_value(context)
source.get_mutable_source_value(context)
def test_ctor_api(self):
"""Tests construction of systems for systems whose executions semantics
are not tested above.
"""
ConstantValueSource(Value('Hello world'))
DiscreteTimeDelay(update_sec=0.1, delay_time_steps=5, vector_size=2)
DiscreteTimeDelay(update_sec=0.1, delay_time_steps=5,
abstract_model_value=Value('Hello world'))
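        # The deprecated delay_timesteps spelling still constructs the
        # system but should emit one warning per call.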
with catch_drake_warnings(expected_count=2) as w:
DiscreteTimeDelay(update_sec=0.1, delay_timesteps=5, vector_size=2)
DiscreteTimeDelay(update_sec=0.1, delay_timesteps=5,
abstract_model_value=Value('Hello world'))
ZeroOrderHold(period_sec=0.1, offset_sec=0.0, vector_size=2)
dut = ZeroOrderHold(period_sec=1.0, offset_sec=0.25,
abstract_model_value=Value('Hello world'))
self.assertEqual(dut.period(), 1.0)
self.assertEqual(dut.offset(), 0.25)
def test_shared_pointer_system_ctor(self):
dut = SharedPointerSystem(value_to_hold=[1, 2, 3])
readback = dut.get()
self.assertListEqual(readback, [1, 2, 3])
del dut
self.assertListEqual(readback, [1, 2, 3])
def test_shared_pointer_system_builder(self):
builder = DiagramBuilder()
self.assertListEqual(SharedPointerSystem.AddToBuilder(builder=
builder, value_to_hold=[1, 2, 3]), [1, 2, 3])
diagram = builder.Build()
del builder
readback = diagram.GetSystems()[0].get()
self.assertListEqual(readback, [1, 2, 3])
del diagram
self.assertListEqual(readback, [1, 2, 3])
def test_sine(self):
sine_source = Sine(amplitude=1, frequency=2, phase=3, size=1,
is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 1)
self.assertEqual(sine_source.get_output_port(1).size(), 1)
self.assertEqual(sine_source.get_output_port(2).size(), 1)
sine_source = Sine(amplitude=1, frequency=2, phase=3, size=3,
is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 3)
self.assertEqual(sine_source.get_output_port(1).size(), 3)
self.assertEqual(sine_source.get_output_port(2).size(), 3)
sine_source = Sine(amplitudes=np.ones(2), frequencies=np.ones(2),
phases=np.ones(2), is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 2)
self.assertEqual(sine_source.get_output_port(1).size(), 2)
self.assertEqual(sine_source.get_output_port(2).size(), 2)
def test_discrete_derivative(self):
discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=0.5)
self.assertEqual(discrete_derivative.get_input_port(0).size(), 5)
self.assertEqual(discrete_derivative.get_output_port(0).size(), 5)
self.assertEqual(discrete_derivative.time_step(), 0.5)
self.assertTrue(discrete_derivative.suppress_initial_transient())
discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=
0.5, suppress_initial_transient=False)
self.assertFalse(discrete_derivative.suppress_initial_transient())
<|reserved_special_token_0|>
@numpy_compare.check_nonsymbolic_types
def test_log_vector_output(self, T):
builder = DiagramBuilder_[T]()
kSize = 1
integrator = builder.AddSystem(Integrator_[T](kSize))
port = integrator.get_output_port(0)
loggers = []
loggers.append(LogVectorOutput(port, builder))
loggers.append(LogVectorOutput(src=port, builder=builder))
loggers.append(LogVectorOutput(port, builder, 0.125))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_period=0.125))
loggers.append(LogVectorOutput(port, builder, {TriggerType.kForced}))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_triggers={TriggerType.kForced}))
        loggers.append(
            LogVectorOutput(port, builder, {TriggerType.kPeriodic}, 0.125))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_triggers={TriggerType.kPeriodic}, publish_period=0.125))
diagram = builder.Build()
context = diagram.CreateDefaultContext()
self.assertTrue(all(logger.FindLog(context).num_samples() == 0 for
logger in loggers))
<|reserved_special_token_0|>
@numpy_compare.check_nonsymbolic_types
def test_vector_log_sink(self, T):
builder = DiagramBuilder_[T]()
kSize = 1
constructors = [VectorLogSink_[T]]
loggers = []
if T == float:
constructors.append(VectorLogSink)
for constructor in constructors:
loggers.append(builder.AddSystem(constructor(kSize)))
loggers.append(builder.AddSystem(constructor(input_size=kSize)))
loggers.append(builder.AddSystem(constructor(kSize, 0.125)))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_period=0.125)))
loggers.append(builder.AddSystem(constructor(kSize, {
TriggerType.kForced})))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_triggers={TriggerType.kForced})))
loggers.append(builder.AddSystem(constructor(kSize, {
TriggerType.kPeriodic}, 0.125)))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_triggers={TriggerType.kPeriodic}, publish_period=
0.125)))
diagram = builder.Build()
context = diagram.CreateDefaultContext()
        self.assertTrue(all(
            logger.FindLog(context) == logger.FindMutableLog(context)
            for logger in loggers))
        loggers_and_contexts = [
            (x, x.GetMyContextFromRoot(context)) for x in loggers]
        self.assertTrue(all(
            logger.GetLog(logger_context)
            == logger.GetMutableLog(logger_context)
            for logger, logger_context in loggers_and_contexts))
        self.assertTrue(all(
            logger.GetLog(logger_context) == logger.FindLog(context)
            for logger, logger_context in loggers_and_contexts))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestGeneral(unittest.TestCase):
def _check_instantiations(self, template, supports_symbolic=True):
default_cls = template[None]
self.assertTrue(template[float] is default_cls)
self.assertTrue(template[AutoDiffXd] is not default_cls)
if supports_symbolic:
self.assertTrue(template[Expression] is not default_cls)
def test_instantiations(self):
self._check_instantiations(Adder_)
self._check_instantiations(AffineSystem_)
self._check_instantiations(ConstantValueSource_)
self._check_instantiations(ConstantVectorSource_)
self._check_instantiations(Demultiplexer_)
self._check_instantiations(DiscreteDerivative_)
self._check_instantiations(DiscreteTimeDelay_)
self._check_instantiations(Gain_)
self._check_instantiations(Integrator_)
self._check_instantiations(LinearSystem_)
self._check_instantiations(LinearTransformDensity_,
supports_symbolic=False)
self._check_instantiations(Multiplexer_)
self._check_instantiations(MultilayerPerceptron_)
self._check_instantiations(PassThrough_)
self._check_instantiations(PortSwitch_)
self._check_instantiations(Saturation_)
self._check_instantiations(SharedPointerSystem_)
self._check_instantiations(Sine_)
self._check_instantiations(StateInterpolatorWithDiscreteDerivative_)
self._check_instantiations(SymbolicVectorSystem_)
self._check_instantiations(TrajectoryAffineSystem_,
supports_symbolic=False)
self._check_instantiations(TrajectoryLinearSystem_,
supports_symbolic=False)
self._check_instantiations(TrajectorySource_)
self._check_instantiations(VectorLogSink_)
self._check_instantiations(WrapToSystem_)
self._check_instantiations(ZeroOrderHold_)
def test_linear_affine_system(self):
A = np.identity(2)
B = np.array([[0], [1]])
f0 = np.array([[0], [0]])
C = np.array([[0, 1]])
D = [1]
y0 = [0]
system = LinearSystem(A, B, C, D)
context = system.CreateDefaultContext()
self.assertEqual(system.get_input_port(0).size(), 1)
        self.assertEqual(
            context.get_mutable_continuous_state_vector().size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertTrue((system.A() == A).all())
self.assertTrue((system.B() == B).all())
self.assertTrue((system.f0() == f0).all())
self.assertTrue((system.C() == C).all())
self.assertEqual(system.D(), D)
self.assertEqual(system.y0(), y0)
self.assertEqual(system.time_period(), 0.0)
x0 = np.array([1, 2])
system.configure_default_state(x0=x0)
system.SetDefaultContext(context)
        np.testing.assert_equal(
            context.get_continuous_state_vector().CopyToVector(), x0)
        generator = RandomGenerator()
        system.SetRandomContext(context, generator)
        np.testing.assert_equal(
            context.get_continuous_state_vector().CopyToVector(), x0)
        system.configure_random_state(covariance=np.eye(2))
        system.SetRandomContext(context, generator)
        self.assertNotEqual(
            context.get_continuous_state_vector().CopyToVector()[1], x0[1])
Co = ControllabilityMatrix(system)
self.assertEqual(Co.shape, (2, 2))
self.assertFalse(IsControllable(system))
self.assertFalse(IsControllable(system, 1e-06))
self.assertFalse(IsStabilizable(sys=system))
self.assertFalse(IsStabilizable(sys=system, threshold=1e-06))
Ob = ObservabilityMatrix(system)
self.assertEqual(Ob.shape, (2, 2))
self.assertFalse(IsObservable(system))
self.assertFalse(IsDetectable(sys=system))
self.assertFalse(IsDetectable(sys=system, threshold=1e-06))
system = AffineSystem(A, B, f0, C, D, y0, 0.1)
self.assertEqual(system.get_input_port(0), system.get_input_port())
self.assertEqual(system.get_output_port(0), system.get_output_port())
context = system.CreateDefaultContext()
self.assertEqual(system.get_input_port(0).size(), 1)
self.assertEqual(context.get_discrete_state_vector().size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertTrue((system.A() == A).all())
self.assertTrue((system.B() == B).all())
self.assertTrue((system.f0() == f0).all())
self.assertTrue((system.C() == C).all())
self.assertEqual(system.D(), D)
self.assertEqual(system.y0(), y0)
self.assertEqual(system.time_period(), 0.1)
system.get_input_port(0).FixValue(context, 0)
linearized = Linearize(system, context)
self.assertTrue((linearized.A() == A).all())
taylor = FirstOrderTaylorApproximation(system, context)
self.assertTrue((taylor.y0() == y0).all())
new_A = np.array([[1, 2], [3, 4]])
new_B = np.array([[5], [6]])
new_f0 = np.array([[7], [8]])
new_C = np.array([[9, 10]])
new_D = np.array([[11]])
new_y0 = np.array([12])
        system.UpdateCoefficients(
            A=new_A, B=new_B, f0=new_f0, C=new_C, D=new_D, y0=new_y0)
np.testing.assert_equal(new_A, system.A())
np.testing.assert_equal(new_B, system.B())
np.testing.assert_equal(new_f0.flatten(), system.f0())
np.testing.assert_equal(new_C, system.C())
np.testing.assert_equal(new_D, system.D())
np.testing.assert_equal(new_y0, system.y0())
system = MatrixGain(D=A)
self.assertTrue((system.D() == A).all())
system = TrajectoryAffineSystem(PiecewisePolynomial(A),
PiecewisePolynomial(B), PiecewisePolynomial(f0),
PiecewisePolynomial(C), PiecewisePolynomial(D),
PiecewisePolynomial(y0), 0.1)
self.assertEqual(system.get_input_port(0), system.get_input_port())
self.assertEqual(system.get_output_port(0), system.get_output_port())
context = system.CreateDefaultContext()
self.assertEqual(system.get_input_port(0).size(), 1)
self.assertEqual(context.get_discrete_state_vector().size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
for t in np.linspace(0.0, 1.0, 5):
self.assertTrue((system.A(t) == A).all())
self.assertTrue((system.B(t) == B).all())
self.assertTrue((system.f0(t) == f0).all())
self.assertTrue((system.C(t) == C).all())
self.assertEqual(system.D(t), D)
self.assertEqual(system.y0(t), y0)
self.assertEqual(system.time_period(), 0.1)
x0 = np.array([1, 2])
system.configure_default_state(x0=x0)
system.SetDefaultContext(context)
        np.testing.assert_equal(
            context.get_discrete_state_vector().CopyToVector(), x0)
        generator = RandomGenerator()
        system.SetRandomContext(context, generator)
        np.testing.assert_equal(
            context.get_discrete_state_vector().CopyToVector(), x0)
        system.configure_random_state(covariance=np.eye(2))
        system.SetRandomContext(context, generator)
        self.assertNotEqual(
            context.get_discrete_state_vector().CopyToVector()[1], x0[1])
        system = TrajectoryLinearSystem(
            A=PiecewisePolynomial(A), B=PiecewisePolynomial(B),
            C=PiecewisePolynomial(C), D=PiecewisePolynomial(D),
            time_period=0.1)
self.assertEqual(system.time_period(), 0.1)
system.configure_default_state(x0=np.array([1, 2]))
system.configure_random_state(covariance=np.eye(2))
def test_linear_affine_system_empty_matrices(self):
def CheckSizes(system, num_states, num_inputs, num_outputs):
self.assertEqual(system.num_continuous_states(), num_states)
self.assertEqual(system.num_inputs(), num_inputs)
self.assertEqual(system.num_outputs(), num_outputs)
system = AffineSystem(y0=[2, 1])
CheckSizes(system, num_states=0, num_inputs=0, num_outputs=2)
system = AffineSystem(D=np.eye(2))
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = LinearSystem(D=np.eye(2))
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = AffineSystem(D=np.eye(2), y0=[1, 2])
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = LinearSystem(B=np.eye(2))
CheckSizes(system, num_states=2, num_inputs=2, num_outputs=0)
def test_linear_system_zero_size(self):
num_x = 0
num_y = 2
num_u = 2
A = np.zeros((num_x, num_x))
B = np.zeros((num_x, num_u))
C = np.zeros((num_y, num_x))
D = np.zeros((num_y, num_u))
self.assertIsNotNone(LinearSystem(A, B, C, D))
@numpy_compare.check_nonsymbolic_types
def test_linear_transform_density(self, T):
dut = LinearTransformDensity_[T](distribution=RandomDistribution.
kGaussian, input_size=3, output_size=3)
w_in = np.array([T(0.5), T(0.1), T(1.5)])
context = dut.CreateDefaultContext()
dut.get_input_port_w_in().FixValue(context, w_in)
self.assertEqual(dut.get_input_port_A().size(), 9)
self.assertEqual(dut.get_input_port_b().size(), 3)
self.assertEqual(dut.get_distribution(), RandomDistribution.kGaussian)
A = np.array([[T(0.5), T(1), T(2)], [T(1), T(2), T(3)], [T(3), T(4),
T(5)]])
dut.FixConstantA(context=context, A=A)
b = np.array([T(1), T(2), T(3)])
dut.FixConstantB(context=context, b=b)
dut.CalcDensity(context=context)
self.assertEqual(dut.get_output_port_w_out().size(), 3)
self.assertEqual(dut.get_output_port_w_out_density().size(), 1)
def test_vector_pass_through(self):
model_value = BasicVector([1.0, 2, 3])
system = PassThrough(vector_size=model_value.size())
context = system.CreateDefaultContext()
system.get_input_port(0).FixValue(context, model_value)
output = system.AllocateOutput()
input_eval = system.EvalVectorInput(context, 0)
compare_value(self, input_eval, model_value)
system.CalcOutput(context, output)
output_value = output.get_vector_data(0)
compare_value(self, output_value, model_value)
def test_default_vector_pass_through(self):
model_value = [1.0, 2, 3]
system = PassThrough(value=model_value)
context = system.CreateDefaultContext()
np.testing.assert_array_equal(model_value, system.get_output_port()
.Eval(context))
def test_abstract_pass_through(self):
model_value = Value('Hello world')
system = PassThrough(abstract_model_value=model_value)
context = system.CreateDefaultContext()
system.get_input_port(0).FixValue(context, model_value)
output = system.AllocateOutput()
input_eval = system.EvalAbstractInput(context, 0)
compare_value(self, input_eval, model_value)
system.CalcOutput(context, output)
output_value = output.get_data(0)
compare_value(self, output_value, model_value)
def test_port_switch(self):
system = PortSwitch(vector_size=2)
a = system.DeclareInputPort(name='a')
system.DeclareInputPort(name='b')
context = system.CreateDefaultContext()
self.assertIsInstance(a, InputPort)
system.get_port_selector_input_port().FixValue(context, a.get_index())
def test_first_order_low_pass_filter(self):
filter1 = FirstOrderLowPassFilter(time_constant=3.0, size=4)
self.assertEqual(filter1.get_time_constant(), 3.0)
alpha = np.array([1, 2, 3])
filter2 = FirstOrderLowPassFilter(time_constants=alpha)
np.testing.assert_array_equal(filter2.get_time_constants_vector(),
alpha)
context = filter2.CreateDefaultContext()
filter2.set_initial_output_value(context, [0.0, -0.2, 0.4])
<|reserved_special_token_0|>
def test_saturation(self):
system = Saturation((0.0, -1.0, 3.0), (1.0, 2.0, 4.0))
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
system.get_input_port(0).FixValue(context, input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(0).
CopyToVector(), expected))
mytest((-5.0, 5.0, 4.0), (0.0, 2.0, 4.0))
mytest((0.4, 0.0, 3.5), (0.4, 0.0, 3.5))
def test_trajectory_source(self):
ppt = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[2.0, 3.0], [
2.0, 1.0]])
system = TrajectorySource(trajectory=ppt, output_derivative_order=0,
zero_derivatives_beyond_limits=True)
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
context.SetTime(input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(0).
CopyToVector(), expected))
mytest(0.0, (2.0, 2.0))
mytest(0.5, (2.5, 1.5))
mytest(1.0, (3.0, 1.0))
ppt2 = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[4.0, 6.0],
[4.0, 2.0]])
system.UpdateTrajectory(trajectory=ppt2)
mytest(0.0, (4.0, 4.0))
mytest(0.5, (5.0, 3.0))
mytest(1.0, (6.0, 2.0))
def test_symbolic_vector_system(self):
t = Variable('t')
x = [Variable('x0'), Variable('x1')]
u = [Variable('u0'), Variable('u1')]
system = SymbolicVectorSystem(time=t, state=x, input=u, dynamics=[x
[0] + x[1], t], output=[u[1]], time_period=0.0)
context = system.CreateDefaultContext()
self.assertEqual(context.num_continuous_states(), 2)
self.assertEqual(context.num_discrete_state_groups(), 0)
self.assertEqual(system.get_input_port(0).size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertEqual(context.num_abstract_parameters(), 0)
self.assertEqual(context.num_numeric_parameter_groups(), 0)
self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(x[0] + x[1])
)
self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))
def test_symbolic_vector_system_parameters(self):
t = Variable('t')
x = [Variable('x0'), Variable('x1')]
u = [Variable('u0'), Variable('u1')]
p = [Variable('p0'), Variable('p1')]
system = SymbolicVectorSystem(time=t, state=x, input=u, parameter=p,
dynamics=[p[0] * x[0] + x[1] + p[1], t], output=[u[1]],
time_period=0.0)
context = system.CreateDefaultContext()
self.assertEqual(context.num_continuous_states(), 2)
self.assertEqual(context.num_discrete_state_groups(), 0)
self.assertEqual(system.get_input_port(0).size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertEqual(context.num_abstract_parameters(), 0)
self.assertEqual(context.num_numeric_parameter_groups(), 1)
self.assertEqual(context.get_numeric_parameter(0).size(), 2)
self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(p[0] * x
[0] + x[1] + p[1]))
self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))
def test_wrap_to_system(self):
system = WrapToSystem(2)
system.set_interval(1, 1.0, 2.0)
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
system.get_input_port(0).FixValue(context, input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(0).
CopyToVector(), expected))
mytest((-1.5, 0.5), (-1.5, 1.5))
mytest((0.2, 0.3), (0.2, 1.3))
def test_demultiplexer(self):
demux = Demultiplexer(size=4)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), 4)
numpy_compare.assert_equal(demux.get_output_ports_sizes(), [1, 1, 1, 1]
)
input_vec = np.array([1.0, 2.0, 3.0, 4.0])
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
for i in range(4):
self.assertTrue(np.allclose(output.get_vector_data(i).get_value
(), input_vec[i]))
demux = Demultiplexer(size=4, output_ports_size=2)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), 2)
numpy_compare.assert_equal(demux.get_output_ports_sizes(), [2, 2])
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
for i in range(2):
self.assertTrue(np.allclose(output.get_vector_data(i).get_value
(), input_vec[2 * i:2 * i + 2]))
output_ports_sizes = np.array([1, 2, 1])
num_output_ports = output_ports_sizes.size
input_vec = np.array([1.0, 2.0, 3.0, 4.0])
demux = Demultiplexer(output_ports_sizes=output_ports_sizes)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), num_output_ports)
numpy_compare.assert_equal(demux.get_output_ports_sizes(),
output_ports_sizes)
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
output_port_start = 0
for i in range(num_output_ports):
output_port_size = output.get_vector_data(i).size()
self.assertTrue(np.allclose(output.get_vector_data(i).get_value
(), input_vec[output_port_start:output_port_start +
output_port_size]))
output_port_start += output_port_size
def test_multiplexer(self):
my_vector = MyVector2(data=[1.0, 2.0])
test_cases = [dict(has_vector=False, mux=Multiplexer(
num_scalar_inputs=4), data=[[5.0], [3.0], [4.0], [2.0]]), dict(
has_vector=False, mux=Multiplexer(input_sizes=[2, 3]), data=[[
8.0, 4.0], [3.0, 6.0, 9.0]]), dict(has_vector=True, mux=
Multiplexer(model_vector=my_vector), data=[[42.0], [3.0]])]
for case in test_cases:
mux = case['mux']
port_size = sum([len(vec) for vec in case['data']])
self.assertEqual(mux.get_output_port(0).size(), port_size)
context = mux.CreateDefaultContext()
output = mux.AllocateOutput()
num_ports = len(case['data'])
self.assertEqual(context.num_input_ports(), num_ports)
for j, vec in enumerate(case['data']):
mux.get_input_port(j).FixValue(context, vec)
mux.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(0).get_value
(), [elem for vec in case['data'] for elem in vec]))
if case['has_vector']:
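                # Check the type matches MyVector2.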
value = output.get_vector_data(0)
self.assertTrue(isinstance(value, MyVector2))
def test_multilayer_perceptron(self):
mlp = MultilayerPerceptron(layers=[1, 2, 3], activation_type=
PerceptronActivationType.kReLU)
self.assertEqual(mlp.get_input_port().size(), 1)
self.assertEqual(mlp.get_output_port().size(), 3)
context = mlp.CreateDefaultContext()
params = np.zeros((mlp.num_parameters(), 1))
self.assertEqual(mlp.num_parameters(), 13)
self.assertEqual(mlp.layers(), [1, 2, 3])
self.assertEqual(mlp.activation_type(layer=0),
PerceptronActivationType.kReLU)
self.assertEqual(len(mlp.GetParameters(context=context)), mlp.
num_parameters())
mlp.SetWeights(context=context, layer=0, W=np.array([[1], [2]]))
mlp.SetBiases(context=context, layer=0, b=[3, 4])
np.testing.assert_array_equal(mlp.GetWeights(context=context, layer
=0), np.array([[1], [2]]))
np.testing.assert_array_equal(mlp.GetBiases(context=context, layer=
0), np.array([3, 4]))
params = np.zeros(mlp.num_parameters())
mlp.SetWeights(params=params, layer=0, W=np.array([[1], [2]]))
mlp.SetBiases(params=params, layer=0, b=[3, 4])
np.testing.assert_array_equal(mlp.GetWeights(params=params, layer=0
), np.array([[1], [2]]))
np.testing.assert_array_equal(mlp.GetBiases(params=params, layer=0),
np.array([3, 4]))
mutable_params = mlp.GetMutableParameters(context=context)
mutable_params[:] = 3.0
np.testing.assert_array_equal(mlp.GetParameters(context), np.full(
mlp.num_parameters(), 3.0))
global called_loss
called_loss = False
def silly_loss(Y, dloss_dY):
global called_loss
called_loss = True
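            # We must be careful to update dloss_dY in place, rather than
            # bind a new matrix to the same variable name.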
dloss_dY[:] = 1
return Y.sum()
dloss_dparams = np.zeros((13,))
generator = RandomGenerator(23)
mlp.SetRandomContext(context, generator)
mlp.Backpropagation(context=context, X=np.array([1, 3, 4]).reshape(
(1, 3)), loss=silly_loss, dloss_dparams=dloss_dparams)
self.assertTrue(called_loss)
self.assertTrue(dloss_dparams.any())
dloss_dparams = np.zeros((13,))
mlp.BackpropagationMeanSquaredError(context=context, X=np.array([1,
3, 4]).reshape((1, 3)), Y_desired=np.eye(3), dloss_dparams=
dloss_dparams)
self.assertTrue(dloss_dparams.any())
Y = np.asfortranarray(np.eye(3))
mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]), Y=Y)
self.assertFalse(np.allclose(Y, np.eye(3)))
Y2 = mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]))
np.testing.assert_array_equal(Y, Y2)
mlp2 = MultilayerPerceptron(layers=[3, 2, 1], activation_types=[
PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])
self.assertEqual(mlp2.activation_type(0), PerceptronActivationType.
kReLU)
self.assertEqual(mlp2.activation_type(1), PerceptronActivationType.
kTanh)
Y = np.asfortranarray(np.full((1, 3), 2.4))
dYdX = np.asfortranarray(np.full((3, 3), 5.3))
context2 = mlp2.CreateDefaultContext()
mlp2.BatchOutput(context=context2, X=np.eye(3), Y=Y, dYdX=dYdX)
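        # The default context sets the weights and biases to zero, so the
        # output (and gradients) should be zero.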
np.testing.assert_array_almost_equal(Y, np.zeros((1, 3)))
np.testing.assert_array_almost_equal(dYdX, np.zeros((3, 3)))
mlp = MultilayerPerceptron(use_sin_cos_for_input=[True, False],
remaining_layers=[3, 2], activation_types=[
PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])
self.assertEqual(mlp.get_input_port().size(), 2)
np.testing.assert_array_equal(mlp.layers(), [3, 3, 2])
def test_random_source(self):
source = RandomSource(distribution=RandomDistribution.kUniform,
num_outputs=2, sampling_interval_sec=0.01)
self.assertEqual(source.get_output_port(0).size(), 2)
builder = DiagramBuilder()
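        # Note: There are no random inputs to add to the empty diagram, but
        # it confirms the API works.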
AddRandomInputs(sampling_interval_sec=0.01, builder=builder)
builder_ad = DiagramBuilder_[AutoDiffXd]()
AddRandomInputs(sampling_interval_sec=0.01, builder=builder_ad)
def test_constant_vector_source(self):
source = ConstantVectorSource(source_value=[1.0, 2.0])
context = source.CreateDefaultContext()
source.get_source_value(context)
source.get_mutable_source_value(context)
def test_ctor_api(self):
"""Tests construction of systems for systems whose executions semantics
are not tested above.
"""
ConstantValueSource(Value('Hello world'))
DiscreteTimeDelay(update_sec=0.1, delay_time_steps=5, vector_size=2)
DiscreteTimeDelay(update_sec=0.1, delay_time_steps=5,
abstract_model_value=Value('Hello world'))
with catch_drake_warnings(expected_count=2) as w:
DiscreteTimeDelay(update_sec=0.1, delay_timesteps=5, vector_size=2)
DiscreteTimeDelay(update_sec=0.1, delay_timesteps=5,
abstract_model_value=Value('Hello world'))
ZeroOrderHold(period_sec=0.1, offset_sec=0.0, vector_size=2)
dut = ZeroOrderHold(period_sec=1.0, offset_sec=0.25,
abstract_model_value=Value('Hello world'))
self.assertEqual(dut.period(), 1.0)
self.assertEqual(dut.offset(), 0.25)
def test_shared_pointer_system_ctor(self):
dut = SharedPointerSystem(value_to_hold=[1, 2, 3])
readback = dut.get()
self.assertListEqual(readback, [1, 2, 3])
del dut
self.assertListEqual(readback, [1, 2, 3])
def test_shared_pointer_system_builder(self):
builder = DiagramBuilder()
self.assertListEqual(SharedPointerSystem.AddToBuilder(builder=
builder, value_to_hold=[1, 2, 3]), [1, 2, 3])
diagram = builder.Build()
del builder
readback = diagram.GetSystems()[0].get()
self.assertListEqual(readback, [1, 2, 3])
del diagram
self.assertListEqual(readback, [1, 2, 3])
def test_sine(self):
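        # Test scalar output.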
sine_source = Sine(amplitude=1, frequency=2, phase=3, size=1,
is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 1)
self.assertEqual(sine_source.get_output_port(1).size(), 1)
self.assertEqual(sine_source.get_output_port(2).size(), 1)
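        # Test vector output.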
sine_source = Sine(amplitude=1, frequency=2, phase=3, size=3,
is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 3)
self.assertEqual(sine_source.get_output_port(1).size(), 3)
self.assertEqual(sine_source.get_output_port(2).size(), 3)
sine_source = Sine(amplitudes=np.ones(2), frequencies=np.ones(2),
phases=np.ones(2), is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 2)
self.assertEqual(sine_source.get_output_port(1).size(), 2)
self.assertEqual(sine_source.get_output_port(2).size(), 2)
def test_discrete_derivative(self):
discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=0.5)
self.assertEqual(discrete_derivative.get_input_port(0).size(), 5)
self.assertEqual(discrete_derivative.get_output_port(0).size(), 5)
self.assertEqual(discrete_derivative.time_step(), 0.5)
self.assertTrue(discrete_derivative.suppress_initial_transient())
discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=
0.5, suppress_initial_transient=False)
self.assertFalse(discrete_derivative.suppress_initial_transient())
def test_state_interpolator_with_discrete_derivative(self):
state_interpolator = StateInterpolatorWithDiscreteDerivative(
num_positions=5, time_step=0.4)
self.assertEqual(state_interpolator.get_input_port(0).size(), 5)
self.assertEqual(state_interpolator.get_output_port(0).size(), 10)
self.assertTrue(state_interpolator.suppress_initial_transient())
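        # Test set_initial_position using a context.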
context = state_interpolator.CreateDefaultContext()
state_interpolator.set_initial_position(context=context, position=5 *
[1.1])
np.testing.assert_array_equal(context.get_discrete_state(0).
CopyToVector(), np.array(5 * [1.1]))
np.testing.assert_array_equal(context.get_discrete_state(1).
CopyToVector(), np.array(5 * [1.1]))
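        # Test set_initial_position using a state.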
context = state_interpolator.CreateDefaultContext()
state_interpolator.set_initial_position(state=context.get_state(),
position=5 * [1.3])
np.testing.assert_array_equal(context.get_discrete_state(0).
CopyToVector(), np.array(5 * [1.3]))
np.testing.assert_array_equal(context.get_discrete_state(1).
CopyToVector(), np.array(5 * [1.3]))
state_interpolator = StateInterpolatorWithDiscreteDerivative(
num_positions=5, time_step=0.4, suppress_initial_transient=True)
self.assertTrue(state_interpolator.suppress_initial_transient())
@numpy_compare.check_nonsymbolic_types
def test_log_vector_output(self, T):
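        # Add various redundant loggers to a system, to exercise the
        # LogVectorOutput bindings.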
builder = DiagramBuilder_[T]()
kSize = 1
integrator = builder.AddSystem(Integrator_[T](kSize))
port = integrator.get_output_port(0)
loggers = []
loggers.append(LogVectorOutput(port, builder))
loggers.append(LogVectorOutput(src=port, builder=builder))
loggers.append(LogVectorOutput(port, builder, 0.125))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_period=0.125))
loggers.append(LogVectorOutput(port, builder, {TriggerType.kForced}))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_triggers={TriggerType.kForced}))
loggers.append(LogVectorOutput(port, builder, {TriggerType.
kPeriodic}, 0.125))
loggers.append(LogVectorOutput(src=port, builder=builder,
publish_triggers={TriggerType.kPeriodic}, publish_period=0.125))
diagram = builder.Build()
context = diagram.CreateDefaultContext()
self.assertTrue(all(logger.FindLog(context).num_samples() == 0 for
logger in loggers))
<|reserved_special_token_0|>
@numpy_compare.check_nonsymbolic_types
def test_vector_log_sink(self, T):
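        # Add various redundant loggers to a system, to exercise the
        # VectorLogSink constructor bindings.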
builder = DiagramBuilder_[T]()
kSize = 1
constructors = [VectorLogSink_[T]]
loggers = []
if T == float:
constructors.append(VectorLogSink)
for constructor in constructors:
loggers.append(builder.AddSystem(constructor(kSize)))
loggers.append(builder.AddSystem(constructor(input_size=kSize)))
loggers.append(builder.AddSystem(constructor(kSize, 0.125)))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_period=0.125)))
loggers.append(builder.AddSystem(constructor(kSize, {
TriggerType.kForced})))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_triggers={TriggerType.kForced})))
loggers.append(builder.AddSystem(constructor(kSize, {
TriggerType.kPeriodic}, 0.125)))
loggers.append(builder.AddSystem(constructor(input_size=kSize,
publish_triggers={TriggerType.kPeriodic}, publish_period=
0.125)))
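        # Exercise all of the log access methods.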
diagram = builder.Build()
context = diagram.CreateDefaultContext()
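        # FindLog and FindMutableLog find the same object.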
self.assertTrue(all(logger.FindLog(context) == logger.
FindMutableLog(context) for logger in loggers))
loggers_and_contexts = [(x, x.GetMyContextFromRoot(context)) for x in
loggers]
self.assertTrue(all(logger.GetLog(logger_context) == logger.
GetMutableLog(logger_context) for logger, logger_context in
loggers_and_contexts))
self.assertTrue(all(logger.GetLog(logger_context) == logger.FindLog
(context) for logger, logger_context in loggers_and_contexts))
<|reserved_special_token_1|>
import gc
import unittest
import numpy as np
from pydrake.autodiffutils import AutoDiffXd
from pydrake.common import RandomDistribution, RandomGenerator
from pydrake.common.test_utilities import numpy_compare
from pydrake.common.test_utilities.deprecation import catch_drake_warnings
from pydrake.common.value import Value
from pydrake.symbolic import Expression, Variable
from pydrake.systems.framework import (
BasicVector,
DiagramBuilder,
DiagramBuilder_,
InputPort,
TriggerType,
VectorBase,
)
from pydrake.systems.test.test_util import (
MyVector2,
)
from pydrake.systems.primitives import (
Adder, Adder_,
AddRandomInputs,
AffineSystem, AffineSystem_,
ConstantValueSource, ConstantValueSource_,
ConstantVectorSource, ConstantVectorSource_,
ControllabilityMatrix,
Demultiplexer, Demultiplexer_,
DiscreteDerivative, DiscreteDerivative_,
DiscreteTimeDelay, DiscreteTimeDelay_,
FirstOrderLowPassFilter,
FirstOrderTaylorApproximation,
Gain, Gain_,
Integrator, Integrator_,
IsControllable,
IsDetectable,
IsObservable,
IsStabilizable,
Linearize,
LinearSystem, LinearSystem_,
LinearTransformDensity, LinearTransformDensity_,
LogVectorOutput,
MatrixGain,
Multiplexer, Multiplexer_,
MultilayerPerceptron, MultilayerPerceptron_,
ObservabilityMatrix,
PassThrough, PassThrough_,
PerceptronActivationType,
PortSwitch, PortSwitch_,
RandomSource,
Saturation, Saturation_,
SharedPointerSystem, SharedPointerSystem_,
Sine, Sine_,
StateInterpolatorWithDiscreteDerivative,
StateInterpolatorWithDiscreteDerivative_,
SymbolicVectorSystem, SymbolicVectorSystem_,
TrajectoryAffineSystem, TrajectoryAffineSystem_,
TrajectoryLinearSystem, TrajectoryLinearSystem_,
TrajectorySource, TrajectorySource_,
VectorLog, VectorLogSink, VectorLogSink_,
WrapToSystem, WrapToSystem_,
ZeroOrderHold, ZeroOrderHold_,
)
from pydrake.trajectories import PiecewisePolynomial
def compare_value(test, a, b):
# Compares a vector or abstract value.
if isinstance(a, VectorBase):
test.assertTrue(np.allclose(a.get_value(), b.get_value()))
else:
test.assertEqual(type(a.get_value()), type(b.get_value()))
test.assertEqual(a.get_value(), b.get_value())
class TestGeneral(unittest.TestCase):
def _check_instantiations(self, template, supports_symbolic=True):
default_cls = template[None]
self.assertTrue(template[float] is default_cls)
self.assertTrue(template[AutoDiffXd] is not default_cls)
if supports_symbolic:
self.assertTrue(template[Expression] is not default_cls)
def test_instantiations(self):
# TODO(eric.cousineau): Refine tests once NumPy functionality is
# resolved for dtype=object, or dtype=custom is used.
self._check_instantiations(Adder_)
self._check_instantiations(AffineSystem_)
self._check_instantiations(ConstantValueSource_)
self._check_instantiations(ConstantVectorSource_)
self._check_instantiations(Demultiplexer_)
self._check_instantiations(DiscreteDerivative_)
self._check_instantiations(DiscreteTimeDelay_)
self._check_instantiations(Gain_)
self._check_instantiations(Integrator_)
self._check_instantiations(LinearSystem_)
self._check_instantiations(LinearTransformDensity_,
supports_symbolic=False)
self._check_instantiations(Multiplexer_)
self._check_instantiations(MultilayerPerceptron_)
self._check_instantiations(PassThrough_)
self._check_instantiations(PortSwitch_)
self._check_instantiations(Saturation_)
self._check_instantiations(SharedPointerSystem_)
self._check_instantiations(Sine_)
self._check_instantiations(StateInterpolatorWithDiscreteDerivative_)
self._check_instantiations(SymbolicVectorSystem_)
self._check_instantiations(TrajectoryAffineSystem_,
supports_symbolic=False)
self._check_instantiations(TrajectoryLinearSystem_,
supports_symbolic=False)
self._check_instantiations(TrajectorySource_)
self._check_instantiations(VectorLogSink_)
self._check_instantiations(WrapToSystem_)
self._check_instantiations(ZeroOrderHold_)
def test_linear_affine_system(self):
# Just make sure linear system is spelled correctly.
A = np.identity(2)
B = np.array([[0], [1]])
f0 = np.array([[0], [0]])
C = np.array([[0, 1]])
D = [1]
y0 = [0]
system = LinearSystem(A, B, C, D)
context = system.CreateDefaultContext()
self.assertEqual(system.get_input_port(0).size(), 1)
        self.assertEqual(
            context.get_mutable_continuous_state_vector().size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertTrue((system.A() == A).all())
self.assertTrue((system.B() == B).all())
self.assertTrue((system.f0() == f0).all())
self.assertTrue((system.C() == C).all())
self.assertEqual(system.D(), D)
self.assertEqual(system.y0(), y0)
self.assertEqual(system.time_period(), 0.)
x0 = np.array([1, 2])
system.configure_default_state(x0=x0)
system.SetDefaultContext(context)
np.testing.assert_equal(
context.get_continuous_state_vector().CopyToVector(), x0)
generator = RandomGenerator()
system.SetRandomContext(context, generator)
np.testing.assert_equal(
context.get_continuous_state_vector().CopyToVector(), x0)
system.configure_random_state(covariance=np.eye(2))
system.SetRandomContext(context, generator)
self.assertNotEqual(
context.get_continuous_state_vector().CopyToVector()[1], x0[1])
Co = ControllabilityMatrix(system)
self.assertEqual(Co.shape, (2, 2))
self.assertFalse(IsControllable(system))
self.assertFalse(IsControllable(system, 1e-6))
self.assertFalse(IsStabilizable(sys=system))
self.assertFalse(IsStabilizable(sys=system, threshold=1e-6))
Ob = ObservabilityMatrix(system)
self.assertEqual(Ob.shape, (2, 2))
self.assertFalse(IsObservable(system))
self.assertFalse(IsDetectable(sys=system))
self.assertFalse(IsDetectable(sys=system, threshold=1e-6))
system = AffineSystem(A, B, f0, C, D, y0, .1)
self.assertEqual(system.get_input_port(0), system.get_input_port())
self.assertEqual(system.get_output_port(0), system.get_output_port())
context = system.CreateDefaultContext()
self.assertEqual(system.get_input_port(0).size(), 1)
self.assertEqual(context.get_discrete_state_vector().size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertTrue((system.A() == A).all())
self.assertTrue((system.B() == B).all())
self.assertTrue((system.f0() == f0).all())
self.assertTrue((system.C() == C).all())
self.assertEqual(system.D(), D)
self.assertEqual(system.y0(), y0)
self.assertEqual(system.time_period(), .1)
system.get_input_port(0).FixValue(context, 0)
linearized = Linearize(system, context)
self.assertTrue((linearized.A() == A).all())
taylor = FirstOrderTaylorApproximation(system, context)
self.assertTrue((taylor.y0() == y0).all())
new_A = np.array([[1, 2], [3, 4]])
new_B = np.array([[5], [6]])
new_f0 = np.array([[7], [8]])
new_C = np.array([[9, 10]])
new_D = np.array([[11]])
new_y0 = np.array([12])
system.UpdateCoefficients(
A=new_A, B=new_B, f0=new_f0, C=new_C, D=new_D, y0=new_y0
)
np.testing.assert_equal(new_A, system.A())
np.testing.assert_equal(new_B, system.B())
np.testing.assert_equal(new_f0.flatten(), system.f0())
np.testing.assert_equal(new_C, system.C())
np.testing.assert_equal(new_D, system.D())
np.testing.assert_equal(new_y0, system.y0())
system = MatrixGain(D=A)
self.assertTrue((system.D() == A).all())
system = TrajectoryAffineSystem(
PiecewisePolynomial(A),
PiecewisePolynomial(B),
PiecewisePolynomial(f0),
PiecewisePolynomial(C),
PiecewisePolynomial(D),
PiecewisePolynomial(y0),
.1)
self.assertEqual(system.get_input_port(0), system.get_input_port())
self.assertEqual(system.get_output_port(0), system.get_output_port())
context = system.CreateDefaultContext()
self.assertEqual(system.get_input_port(0).size(), 1)
self.assertEqual(context.get_discrete_state_vector().size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
for t in np.linspace(0., 1., 5):
self.assertTrue((system.A(t) == A).all())
self.assertTrue((system.B(t) == B).all())
self.assertTrue((system.f0(t) == f0).all())
self.assertTrue((system.C(t) == C).all())
self.assertEqual(system.D(t), D)
self.assertEqual(system.y0(t), y0)
self.assertEqual(system.time_period(), .1)
x0 = np.array([1, 2])
system.configure_default_state(x0=x0)
system.SetDefaultContext(context)
np.testing.assert_equal(
context.get_discrete_state_vector().CopyToVector(), x0)
generator = RandomGenerator()
system.SetRandomContext(context, generator)
np.testing.assert_equal(
context.get_discrete_state_vector().CopyToVector(), x0)
system.configure_random_state(covariance=np.eye(2))
system.SetRandomContext(context, generator)
self.assertNotEqual(
context.get_discrete_state_vector().CopyToVector()[1], x0[1])
system = TrajectoryLinearSystem(
A=PiecewisePolynomial(A),
B=PiecewisePolynomial(B),
C=PiecewisePolynomial(C),
D=PiecewisePolynomial(D),
time_period=0.1)
self.assertEqual(system.time_period(), .1)
system.configure_default_state(x0=np.array([1, 2]))
system.configure_random_state(covariance=np.eye(2))
def test_linear_affine_system_empty_matrices(self):
# Confirm the default values for the system matrices in the
# constructor.
def CheckSizes(system, num_states, num_inputs, num_outputs):
self.assertEqual(system.num_continuous_states(), num_states)
self.assertEqual(system.num_inputs(), num_inputs)
self.assertEqual(system.num_outputs(), num_outputs)
# A constant vector system.
system = AffineSystem(y0=[2, 1])
CheckSizes(system, num_states=0, num_inputs=0, num_outputs=2)
# A matrix gain.
system = AffineSystem(D=np.eye(2))
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
system = LinearSystem(D=np.eye(2))
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
# Add an offset.
system = AffineSystem(D=np.eye(2), y0=[1, 2])
CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)
# An integrator.
system = LinearSystem(B=np.eye(2))
CheckSizes(system, num_states=2, num_inputs=2, num_outputs=0)
def test_linear_system_zero_size(self):
# Explicitly test #12633.
num_x = 0
num_y = 2
num_u = 2
A = np.zeros((num_x, num_x))
B = np.zeros((num_x, num_u))
C = np.zeros((num_y, num_x))
D = np.zeros((num_y, num_u))
self.assertIsNotNone(LinearSystem(A, B, C, D))
@numpy_compare.check_nonsymbolic_types
def test_linear_transform_density(self, T):
dut = LinearTransformDensity_[T](
distribution=RandomDistribution.kGaussian,
input_size=3,
output_size=3)
w_in = np.array([T(0.5), T(0.1), T(1.5)])
context = dut.CreateDefaultContext()
dut.get_input_port_w_in().FixValue(context, w_in)
self.assertEqual(dut.get_input_port_A().size(), 9)
self.assertEqual(dut.get_input_port_b().size(), 3)
self.assertEqual(dut.get_distribution(), RandomDistribution.kGaussian)
A = np.array([
[T(0.5), T(1), T(2)], [T(1), T(2), T(3)], [T(3), T(4), T(5)]])
dut.FixConstantA(context=context, A=A)
b = np.array([T(1), T(2), T(3)])
dut.FixConstantB(context=context, b=b)
dut.CalcDensity(context=context)
self.assertEqual(dut.get_output_port_w_out().size(), 3)
self.assertEqual(dut.get_output_port_w_out_density().size(), 1)
def test_vector_pass_through(self):
model_value = BasicVector([1., 2, 3])
system = PassThrough(vector_size=model_value.size())
context = system.CreateDefaultContext()
system.get_input_port(0).FixValue(context, model_value)
output = system.AllocateOutput()
input_eval = system.EvalVectorInput(context, 0)
compare_value(self, input_eval, model_value)
system.CalcOutput(context, output)
output_value = output.get_vector_data(0)
compare_value(self, output_value, model_value)
def test_default_vector_pass_through(self):
model_value = [1., 2, 3]
system = PassThrough(value=model_value)
context = system.CreateDefaultContext()
np.testing.assert_array_equal(
model_value, system.get_output_port().Eval(context))
def test_abstract_pass_through(self):
model_value = Value("Hello world")
system = PassThrough(abstract_model_value=model_value)
context = system.CreateDefaultContext()
system.get_input_port(0).FixValue(context, model_value)
output = system.AllocateOutput()
input_eval = system.EvalAbstractInput(context, 0)
compare_value(self, input_eval, model_value)
system.CalcOutput(context, output)
output_value = output.get_data(0)
compare_value(self, output_value, model_value)
def test_port_switch(self):
system = PortSwitch(vector_size=2)
a = system.DeclareInputPort(name="a")
system.DeclareInputPort(name="b")
context = system.CreateDefaultContext()
self.assertIsInstance(a, InputPort)
system.get_port_selector_input_port().FixValue(context, a.get_index())
def test_first_order_low_pass_filter(self):
filter1 = FirstOrderLowPassFilter(time_constant=3.0, size=4)
self.assertEqual(filter1.get_time_constant(), 3.0)
alpha = np.array([1, 2, 3])
filter2 = FirstOrderLowPassFilter(time_constants=alpha)
np.testing.assert_array_equal(filter2.get_time_constants_vector(),
alpha)
context = filter2.CreateDefaultContext()
filter2.set_initial_output_value(context, [0., -0.2, 0.4])
def test_gain(self):
k = 42.
input_size = 10
systems = [Gain(k=k, size=input_size),
Gain(k=k*np.ones(input_size))]
for system in systems:
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
system.get_input_port(0).FixValue(context, input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(
0).CopyToVector(), expected))
            test_input = np.arange(input_size)
            mytest(test_input, k*test_input)
def test_saturation(self):
system = Saturation((0., -1., 3.), (1., 2., 4.))
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
system.get_input_port(0).FixValue(context, input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(
0).CopyToVector(), expected))
mytest((-5., 5., 4.), (0., 2., 4.))
mytest((.4, 0., 3.5), (.4, 0., 3.5))
def test_trajectory_source(self):
ppt = PiecewisePolynomial.FirstOrderHold(
[0., 1.], [[2., 3.], [2., 1.]])
system = TrajectorySource(trajectory=ppt,
output_derivative_order=0,
zero_derivatives_beyond_limits=True)
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
context.SetTime(input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(
0).CopyToVector(), expected))
mytest(0.0, (2.0, 2.0))
mytest(0.5, (2.5, 1.5))
mytest(1.0, (3.0, 1.0))
ppt2 = PiecewisePolynomial.FirstOrderHold(
[0., 1.], [[4., 6.], [4., 2.]])
system.UpdateTrajectory(trajectory=ppt2)
mytest(0.0, (4.0, 4.0))
mytest(0.5, (5.0, 3.0))
mytest(1.0, (6.0, 2.0))
def test_symbolic_vector_system(self):
t = Variable("t")
x = [Variable("x0"), Variable("x1")]
u = [Variable("u0"), Variable("u1")]
system = SymbolicVectorSystem(time=t, state=x, input=u,
dynamics=[x[0] + x[1], t],
output=[u[1]],
time_period=0.0)
context = system.CreateDefaultContext()
self.assertEqual(context.num_continuous_states(), 2)
self.assertEqual(context.num_discrete_state_groups(), 0)
self.assertEqual(system.get_input_port(0).size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertEqual(context.num_abstract_parameters(), 0)
self.assertEqual(context.num_numeric_parameter_groups(), 0)
self.assertTrue(system.dynamics_for_variable(x[0])
.EqualTo(x[0] + x[1]))
self.assertTrue(system.dynamics_for_variable(x[1])
.EqualTo(t))
def test_symbolic_vector_system_parameters(self):
t = Variable("t")
x = [Variable("x0"), Variable("x1")]
u = [Variable("u0"), Variable("u1")]
p = [Variable("p0"), Variable("p1")]
system = SymbolicVectorSystem(time=t, state=x, input=u,
parameter=p,
dynamics=[p[0] * x[0] + x[1] + p[1], t],
output=[u[1]],
time_period=0.0)
context = system.CreateDefaultContext()
self.assertEqual(context.num_continuous_states(), 2)
self.assertEqual(context.num_discrete_state_groups(), 0)
self.assertEqual(system.get_input_port(0).size(), 2)
self.assertEqual(system.get_output_port(0).size(), 1)
self.assertEqual(context.num_abstract_parameters(), 0)
self.assertEqual(context.num_numeric_parameter_groups(), 1)
self.assertEqual(context.get_numeric_parameter(0).size(), 2)
self.assertTrue(system.dynamics_for_variable(x[0])
.EqualTo(p[0] * x[0] + x[1] + p[1]))
self.assertTrue(system.dynamics_for_variable(x[1])
.EqualTo(t))
def test_wrap_to_system(self):
system = WrapToSystem(2)
system.set_interval(1, 1., 2.)
context = system.CreateDefaultContext()
output = system.AllocateOutput()
def mytest(input, expected):
system.get_input_port(0).FixValue(context, input)
system.CalcOutput(context, output)
self.assertTrue(np.allclose(output.get_vector_data(
0).CopyToVector(), expected))
mytest((-1.5, 0.5), (-1.5, 1.5))
mytest((.2, .3), (.2, 1.3))
def test_demultiplexer(self):
# Test demultiplexer with scalar outputs.
demux = Demultiplexer(size=4)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), 4)
numpy_compare.assert_equal(demux.get_output_ports_sizes(),
[1, 1, 1, 1])
input_vec = np.array([1., 2., 3., 4.])
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
for i in range(4):
self.assertTrue(
np.allclose(output.get_vector_data(i).get_value(),
input_vec[i]))
# Test demultiplexer with vector outputs.
demux = Demultiplexer(size=4, output_ports_size=2)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), 2)
numpy_compare.assert_equal(demux.get_output_ports_sizes(), [2, 2])
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
for i in range(2):
self.assertTrue(
np.allclose(output.get_vector_data(i).get_value(),
input_vec[2*i:2*i+2]))
# Test demultiplexer with different output port sizes.
output_ports_sizes = np.array([1, 2, 1])
num_output_ports = output_ports_sizes.size
input_vec = np.array([1., 2., 3., 4.])
demux = Demultiplexer(output_ports_sizes=output_ports_sizes)
context = demux.CreateDefaultContext()
self.assertEqual(demux.num_input_ports(), 1)
self.assertEqual(demux.num_output_ports(), num_output_ports)
numpy_compare.assert_equal(demux.get_output_ports_sizes(),
output_ports_sizes)
demux.get_input_port(0).FixValue(context, input_vec)
output = demux.AllocateOutput()
demux.CalcOutput(context, output)
output_port_start = 0
for i in range(num_output_ports):
output_port_size = output.get_vector_data(i).size()
self.assertTrue(
np.allclose(output.get_vector_data(i).get_value(),
input_vec[output_port_start:
output_port_start+output_port_size]))
output_port_start += output_port_size
def test_multiplexer(self):
my_vector = MyVector2(data=[1., 2.])
test_cases = [
dict(has_vector=False, mux=Multiplexer(num_scalar_inputs=4),
data=[[5.], [3.], [4.], [2.]]),
dict(has_vector=False, mux=Multiplexer(input_sizes=[2, 3]),
data=[[8., 4.], [3., 6., 9.]]),
dict(has_vector=True, mux=Multiplexer(model_vector=my_vector),
data=[[42.], [3.]]),
]
for case in test_cases:
mux = case['mux']
port_size = sum([len(vec) for vec in case['data']])
self.assertEqual(mux.get_output_port(0).size(), port_size)
context = mux.CreateDefaultContext()
output = mux.AllocateOutput()
num_ports = len(case['data'])
self.assertEqual(context.num_input_ports(), num_ports)
for j, vec in enumerate(case['data']):
mux.get_input_port(j).FixValue(context, vec)
mux.CalcOutput(context, output)
self.assertTrue(
np.allclose(output.get_vector_data(0).get_value(),
[elem for vec in case['data'] for elem in vec]))
if case['has_vector']:
# Check the type matches MyVector2.
value = output.get_vector_data(0)
self.assertTrue(isinstance(value, MyVector2))
def test_multilayer_perceptron(self):
mlp = MultilayerPerceptron(
layers=[1, 2, 3], activation_type=PerceptronActivationType.kReLU)
self.assertEqual(mlp.get_input_port().size(), 1)
self.assertEqual(mlp.get_output_port().size(), 3)
context = mlp.CreateDefaultContext()
params = np.zeros((mlp.num_parameters(), 1))
self.assertEqual(mlp.num_parameters(), 13)
self.assertEqual(mlp.layers(), [1, 2, 3])
self.assertEqual(mlp.activation_type(layer=0),
PerceptronActivationType.kReLU)
self.assertEqual(len(mlp.GetParameters(context=context)),
mlp.num_parameters())
mlp.SetWeights(context=context, layer=0, W=np.array([[1], [2]]))
mlp.SetBiases(context=context, layer=0, b=[3, 4])
np.testing.assert_array_equal(
mlp.GetWeights(context=context, layer=0), np.array([[1], [2]]))
np.testing.assert_array_equal(
mlp.GetBiases(context=context, layer=0), np.array([3, 4]))
params = np.zeros(mlp.num_parameters())
mlp.SetWeights(params=params, layer=0, W=np.array([[1], [2]]))
mlp.SetBiases(params=params, layer=0, b=[3, 4])
np.testing.assert_array_equal(
mlp.GetWeights(params=params, layer=0), np.array([[1], [2]]))
np.testing.assert_array_equal(
mlp.GetBiases(params=params, layer=0), np.array([3, 4]))
mutable_params = mlp.GetMutableParameters(context=context)
mutable_params[:] = 3.0
np.testing.assert_array_equal(mlp.GetParameters(context),
np.full(mlp.num_parameters(), 3.0))
global called_loss
called_loss = False
def silly_loss(Y, dloss_dY):
global called_loss
called_loss = True
# We must be careful to update the dloss in place, rather than bind
# a new matrix to the same variable name.
dloss_dY[:] = 1
# dloss_dY = np.array(...etc...) # <== wrong
return Y.sum()
dloss_dparams = np.zeros((13,))
generator = RandomGenerator(23)
mlp.SetRandomContext(context, generator)
mlp.Backpropagation(context=context,
X=np.array([1, 3, 4]).reshape((1, 3)),
loss=silly_loss,
dloss_dparams=dloss_dparams)
self.assertTrue(called_loss)
self.assertTrue(dloss_dparams.any()) # No longer all zero.
dloss_dparams = np.zeros((13,))
mlp.BackpropagationMeanSquaredError(context=context,
X=np.array([1, 3, 4]).reshape(
(1, 3)),
Y_desired=np.eye(3),
dloss_dparams=dloss_dparams)
self.assertTrue(dloss_dparams.any()) # No longer all zero.
Y = np.asfortranarray(np.eye(3))
mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]), Y=Y)
self.assertFalse(np.allclose(Y, np.eye(3)))
Y2 = mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]))
np.testing.assert_array_equal(Y, Y2)
mlp2 = MultilayerPerceptron(layers=[3, 2, 1],
activation_types=[
PerceptronActivationType.kReLU,
PerceptronActivationType.kTanh
])
self.assertEqual(mlp2.activation_type(0),
PerceptronActivationType.kReLU)
self.assertEqual(mlp2.activation_type(1),
PerceptronActivationType.kTanh)
Y = np.asfortranarray(np.full((1, 3), 2.4))
dYdX = np.asfortranarray(np.full((3, 3), 5.3))
context2 = mlp2.CreateDefaultContext()
mlp2.BatchOutput(context=context2, X=np.eye(3), Y=Y, dYdX=dYdX)
# The default context sets the weights and biases to zero, so the
# output (and gradients) should be zero.
np.testing.assert_array_almost_equal(Y, np.zeros((1, 3)))
np.testing.assert_array_almost_equal(dYdX, np.zeros((3, 3)))
mlp = MultilayerPerceptron(use_sin_cos_for_input=[True, False],
remaining_layers=[3, 2],
activation_types=[
PerceptronActivationType.kReLU,
PerceptronActivationType.kTanh
])
self.assertEqual(mlp.get_input_port().size(), 2)
np.testing.assert_array_equal(mlp.layers(), [3, 3, 2])
def test_random_source(self):
source = RandomSource(distribution=RandomDistribution.kUniform,
num_outputs=2, sampling_interval_sec=0.01)
self.assertEqual(source.get_output_port(0).size(), 2)
builder = DiagramBuilder()
# Note: There are no random inputs to add to the empty diagram, but it
# confirms the API works.
AddRandomInputs(sampling_interval_sec=0.01, builder=builder)
builder_ad = DiagramBuilder_[AutoDiffXd]()
AddRandomInputs(sampling_interval_sec=0.01, builder=builder_ad)
def test_constant_vector_source(self):
source = ConstantVectorSource(source_value=[1., 2.])
context = source.CreateDefaultContext()
source.get_source_value(context)
source.get_mutable_source_value(context)
def test_ctor_api(self):
"""Tests construction of systems for systems whose executions semantics
are not tested above.
"""
ConstantValueSource(Value("Hello world"))
DiscreteTimeDelay(update_sec=0.1, delay_time_steps=5, vector_size=2)
DiscreteTimeDelay(
update_sec=0.1, delay_time_steps=5,
abstract_model_value=Value("Hello world"))
with catch_drake_warnings(expected_count=2) as w:
DiscreteTimeDelay(update_sec=0.1, delay_timesteps=5, vector_size=2)
DiscreteTimeDelay(
update_sec=0.1, delay_timesteps=5,
abstract_model_value=Value("Hello world"))
ZeroOrderHold(period_sec=0.1, offset_sec=0.0, vector_size=2)
dut = ZeroOrderHold(period_sec=1.0, offset_sec=0.25,
abstract_model_value=Value("Hello world"))
self.assertEqual(dut.period(), 1.0)
self.assertEqual(dut.offset(), 0.25)
def test_shared_pointer_system_ctor(self):
dut = SharedPointerSystem(value_to_hold=[1, 2, 3])
readback = dut.get()
self.assertListEqual(readback, [1, 2, 3])
del dut
self.assertListEqual(readback, [1, 2, 3])
def test_shared_pointer_system_builder(self):
builder = DiagramBuilder()
self.assertListEqual(
SharedPointerSystem.AddToBuilder(
builder=builder, value_to_hold=[1, 2, 3]),
[1, 2, 3])
diagram = builder.Build()
del builder
readback = diagram.GetSystems()[0].get()
self.assertListEqual(readback, [1, 2, 3])
del diagram
self.assertListEqual(readback, [1, 2, 3])
def test_sine(self):
# Test scalar output.
sine_source = Sine(amplitude=1, frequency=2, phase=3,
size=1, is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 1)
self.assertEqual(sine_source.get_output_port(1).size(), 1)
self.assertEqual(sine_source.get_output_port(2).size(), 1)
# Test vector output.
sine_source = Sine(amplitude=1, frequency=2, phase=3,
size=3, is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 3)
self.assertEqual(sine_source.get_output_port(1).size(), 3)
self.assertEqual(sine_source.get_output_port(2).size(), 3)
sine_source = Sine(amplitudes=np.ones(2), frequencies=np.ones(2),
phases=np.ones(2), is_time_based=True)
self.assertEqual(sine_source.get_output_port(0).size(), 2)
self.assertEqual(sine_source.get_output_port(1).size(), 2)
self.assertEqual(sine_source.get_output_port(2).size(), 2)
def test_discrete_derivative(self):
discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=0.5)
self.assertEqual(discrete_derivative.get_input_port(0).size(), 5)
self.assertEqual(discrete_derivative.get_output_port(0).size(), 5)
self.assertEqual(discrete_derivative.time_step(), 0.5)
self.assertTrue(discrete_derivative.suppress_initial_transient())
discrete_derivative = DiscreteDerivative(
num_inputs=5, time_step=0.5, suppress_initial_transient=False)
self.assertFalse(discrete_derivative.suppress_initial_transient())
def test_state_interpolator_with_discrete_derivative(self):
state_interpolator = StateInterpolatorWithDiscreteDerivative(
num_positions=5, time_step=0.4)
self.assertEqual(state_interpolator.get_input_port(0).size(), 5)
self.assertEqual(state_interpolator.get_output_port(0).size(), 10)
self.assertTrue(state_interpolator.suppress_initial_transient())
# test set_initial_position using context
context = state_interpolator.CreateDefaultContext()
state_interpolator.set_initial_position(
context=context, position=5*[1.1])
np.testing.assert_array_equal(
context.get_discrete_state(0).CopyToVector(),
np.array(5*[1.1]))
np.testing.assert_array_equal(
context.get_discrete_state(1).CopyToVector(),
np.array(5*[1.1]))
# test set_initial_position using state
context = state_interpolator.CreateDefaultContext()
state_interpolator.set_initial_position(
state=context.get_state(), position=5*[1.3])
np.testing.assert_array_equal(
context.get_discrete_state(0).CopyToVector(),
np.array(5*[1.3]))
np.testing.assert_array_equal(
context.get_discrete_state(1).CopyToVector(),
np.array(5*[1.3]))
state_interpolator = StateInterpolatorWithDiscreteDerivative(
num_positions=5, time_step=0.4, suppress_initial_transient=True)
self.assertTrue(state_interpolator.suppress_initial_transient())
@numpy_compare.check_nonsymbolic_types
def test_log_vector_output(self, T):
# Add various redundant loggers to a system, to exercise the
# LogVectorOutput bindings.
builder = DiagramBuilder_[T]()
kSize = 1
integrator = builder.AddSystem(Integrator_[T](kSize))
port = integrator.get_output_port(0)
loggers = []
loggers.append(LogVectorOutput(port, builder))
loggers.append(LogVectorOutput(src=port, builder=builder))
loggers.append(LogVectorOutput(port, builder, 0.125))
loggers.append(LogVectorOutput(
src=port, builder=builder, publish_period=0.125))
loggers.append(LogVectorOutput(port, builder, {TriggerType.kForced}))
loggers.append(LogVectorOutput(
src=port, builder=builder, publish_triggers={TriggerType.kForced}))
loggers.append(LogVectorOutput(
port, builder, {TriggerType.kPeriodic}, 0.125))
loggers.append(LogVectorOutput(
src=port, builder=builder,
publish_triggers={TriggerType.kPeriodic}, publish_period=0.125))
# Check the returned loggers by calling some trivial methods.
diagram = builder.Build()
context = diagram.CreateDefaultContext()
self.assertTrue(all(logger.FindLog(context).num_samples() == 0
for logger in loggers))
@numpy_compare.check_nonsymbolic_types
def test_vector_log(self, T):
kSize = 1
dut = VectorLog(kSize)
self.assertEqual(dut.get_input_size(), kSize)
dut.AddData(0.1, [22.22])
self.assertEqual(dut.num_samples(), 1)
self.assertEqual(dut.sample_times(), [0.1])
self.assertEqual(dut.data(), [22.22])
dut.Clear()
self.assertEqual(dut.num_samples(), 0)
# There is no good way from python to test the semantics of Reserve(),
# but test the binding anyway.
dut.Reserve(VectorLog.kDefaultCapacity * 3)
@numpy_compare.check_nonsymbolic_types
def test_vector_log_sink(self, T):
# Add various redundant loggers to a system, to exercise the
# VectorLog constructor bindings.
builder = DiagramBuilder_[T]()
kSize = 1
constructors = [VectorLogSink_[T]]
loggers = []
if T == float:
constructors.append(VectorLogSink)
for constructor in constructors:
loggers.append(builder.AddSystem(constructor(kSize)))
loggers.append(builder.AddSystem(constructor(input_size=kSize)))
loggers.append(builder.AddSystem(constructor(kSize, 0.125)))
loggers.append(builder.AddSystem(
constructor(input_size=kSize, publish_period=0.125)))
loggers.append(builder.AddSystem(
constructor(kSize, {TriggerType.kForced})))
loggers.append(builder.AddSystem(
constructor(input_size=kSize,
publish_triggers={TriggerType.kForced})))
loggers.append(builder.AddSystem(
constructor(kSize, {TriggerType.kPeriodic}, 0.125)))
loggers.append(builder.AddSystem(
constructor(input_size=kSize,
publish_triggers={TriggerType.kPeriodic},
publish_period=0.125)))
# Exercise all of the log access methods.
diagram = builder.Build()
context = diagram.CreateDefaultContext()
# FindLog and FindMutableLog find the same object.
self.assertTrue(
all(logger.FindLog(context) == logger.FindMutableLog(context)
for logger in loggers))
# Build a list of pairs of loggers and their local contexts.
loggers_and_contexts = [(x, x.GetMyContextFromRoot(context))
for x in loggers]
# GetLog and GetMutableLog find the same object.
self.assertTrue(
all(logger.GetLog(logger_context)
== logger.GetMutableLog(logger_context)
for logger, logger_context in loggers_and_contexts))
# GetLog and FindLog find the same object, given the proper contexts.
self.assertTrue(
all(logger.GetLog(logger_context) == logger.FindLog(context)
for logger, logger_context in loggers_and_contexts))
|
flexible
|
{
"blob_id": "f17ae8a44f8b032feac7c18fe39663054fea40c0",
"index": 5282,
"step-1": "<mask token>\n\n\nclass TestGeneral(unittest.TestCase):\n\n def _check_instantiations(self, template, supports_symbolic=True):\n default_cls = template[None]\n self.assertTrue(template[float] is default_cls)\n self.assertTrue(template[AutoDiffXd] is not default_cls)\n if supports_symbolic:\n self.assertTrue(template[Expression] is not default_cls)\n\n def test_instantiations(self):\n self._check_instantiations(Adder_)\n self._check_instantiations(AffineSystem_)\n self._check_instantiations(ConstantValueSource_)\n self._check_instantiations(ConstantVectorSource_)\n self._check_instantiations(Demultiplexer_)\n self._check_instantiations(DiscreteDerivative_)\n self._check_instantiations(DiscreteTimeDelay_)\n self._check_instantiations(Gain_)\n self._check_instantiations(Integrator_)\n self._check_instantiations(LinearSystem_)\n self._check_instantiations(LinearTransformDensity_,\n supports_symbolic=False)\n self._check_instantiations(Multiplexer_)\n self._check_instantiations(MultilayerPerceptron_)\n self._check_instantiations(PassThrough_)\n self._check_instantiations(PortSwitch_)\n self._check_instantiations(Saturation_)\n self._check_instantiations(SharedPointerSystem_)\n self._check_instantiations(Sine_)\n self._check_instantiations(StateInterpolatorWithDiscreteDerivative_)\n self._check_instantiations(SymbolicVectorSystem_)\n self._check_instantiations(TrajectoryAffineSystem_,\n supports_symbolic=False)\n self._check_instantiations(TrajectoryLinearSystem_,\n supports_symbolic=False)\n self._check_instantiations(TrajectorySource_)\n self._check_instantiations(VectorLogSink_)\n self._check_instantiations(WrapToSystem_)\n self._check_instantiations(ZeroOrderHold_)\n <mask token>\n\n def test_linear_affine_system_empty_matrices(self):\n\n def CheckSizes(system, num_states, num_inputs, num_outputs):\n self.assertEqual(system.num_continuous_states(), num_states)\n self.assertEqual(system.num_inputs(), num_inputs)\n self.assertEqual(system.num_outputs(), num_outputs)\n system = AffineSystem(y0=[2, 1])\n CheckSizes(system, num_states=0, num_inputs=0, num_outputs=2)\n system = AffineSystem(D=np.eye(2))\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = LinearSystem(D=np.eye(2))\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = AffineSystem(D=np.eye(2), y0=[1, 2])\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = LinearSystem(B=np.eye(2))\n CheckSizes(system, num_states=2, num_inputs=2, num_outputs=0)\n\n def test_linear_system_zero_size(self):\n num_x = 0\n num_y = 2\n num_u = 2\n A = np.zeros((num_x, num_x))\n B = np.zeros((num_x, num_u))\n C = np.zeros((num_y, num_x))\n D = np.zeros((num_y, num_u))\n self.assertIsNotNone(LinearSystem(A, B, C, D))\n\n @numpy_compare.check_nonsymbolic_types\n def test_linear_transform_density(self, T):\n dut = LinearTransformDensity_[T](distribution=RandomDistribution.\n kGaussian, input_size=3, output_size=3)\n w_in = np.array([T(0.5), T(0.1), T(1.5)])\n context = dut.CreateDefaultContext()\n dut.get_input_port_w_in().FixValue(context, w_in)\n self.assertEqual(dut.get_input_port_A().size(), 9)\n self.assertEqual(dut.get_input_port_b().size(), 3)\n self.assertEqual(dut.get_distribution(), RandomDistribution.kGaussian)\n A = np.array([[T(0.5), T(1), T(2)], [T(1), T(2), T(3)], [T(3), T(4),\n T(5)]])\n dut.FixConstantA(context=context, A=A)\n b = np.array([T(1), T(2), T(3)])\n dut.FixConstantB(context=context, b=b)\n dut.CalcDensity(context=context)\n 
self.assertEqual(dut.get_output_port_w_out().size(), 3)\n self.assertEqual(dut.get_output_port_w_out_density().size(), 1)\n\n def test_vector_pass_through(self):\n model_value = BasicVector([1.0, 2, 3])\n system = PassThrough(vector_size=model_value.size())\n context = system.CreateDefaultContext()\n system.get_input_port(0).FixValue(context, model_value)\n output = system.AllocateOutput()\n input_eval = system.EvalVectorInput(context, 0)\n compare_value(self, input_eval, model_value)\n system.CalcOutput(context, output)\n output_value = output.get_vector_data(0)\n compare_value(self, output_value, model_value)\n\n def test_default_vector_pass_through(self):\n model_value = [1.0, 2, 3]\n system = PassThrough(value=model_value)\n context = system.CreateDefaultContext()\n np.testing.assert_array_equal(model_value, system.get_output_port()\n .Eval(context))\n\n def test_abstract_pass_through(self):\n model_value = Value('Hello world')\n system = PassThrough(abstract_model_value=model_value)\n context = system.CreateDefaultContext()\n system.get_input_port(0).FixValue(context, model_value)\n output = system.AllocateOutput()\n input_eval = system.EvalAbstractInput(context, 0)\n compare_value(self, input_eval, model_value)\n system.CalcOutput(context, output)\n output_value = output.get_data(0)\n compare_value(self, output_value, model_value)\n\n def test_port_switch(self):\n system = PortSwitch(vector_size=2)\n a = system.DeclareInputPort(name='a')\n system.DeclareInputPort(name='b')\n context = system.CreateDefaultContext()\n self.assertIsInstance(a, InputPort)\n system.get_port_selector_input_port().FixValue(context, a.get_index())\n\n def test_first_order_low_pass_filter(self):\n filter1 = FirstOrderLowPassFilter(time_constant=3.0, size=4)\n self.assertEqual(filter1.get_time_constant(), 3.0)\n alpha = np.array([1, 2, 3])\n filter2 = FirstOrderLowPassFilter(time_constants=alpha)\n np.testing.assert_array_equal(filter2.get_time_constants_vector(),\n alpha)\n context = filter2.CreateDefaultContext()\n filter2.set_initial_output_value(context, [0.0, -0.2, 0.4])\n <mask token>\n\n def test_saturation(self):\n system = Saturation((0.0, -1.0, 3.0), (1.0, 2.0, 4.0))\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n system.get_input_port(0).FixValue(context, input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest((-5.0, 5.0, 4.0), (0.0, 2.0, 4.0))\n mytest((0.4, 0.0, 3.5), (0.4, 0.0, 3.5))\n\n def test_trajectory_source(self):\n ppt = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[2.0, 3.0], [\n 2.0, 1.0]])\n system = TrajectorySource(trajectory=ppt, output_derivative_order=0,\n zero_derivatives_beyond_limits=True)\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n context.SetTime(input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest(0.0, (2.0, 2.0))\n mytest(0.5, (2.5, 1.5))\n mytest(1.0, (3.0, 1.0))\n ppt2 = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[4.0, 6.0],\n [4.0, 2.0]])\n system.UpdateTrajectory(trajectory=ppt2)\n mytest(0.0, (4.0, 4.0))\n mytest(0.5, (5.0, 3.0))\n mytest(1.0, (6.0, 2.0))\n\n def test_symbolic_vector_system(self):\n t = Variable('t')\n x = [Variable('x0'), Variable('x1')]\n u = [Variable('u0'), Variable('u1')]\n system = SymbolicVectorSystem(time=t, state=x, input=u, dynamics=[x\n 
[0] + x[1], t], output=[u[1]], time_period=0.0)\n context = system.CreateDefaultContext()\n self.assertEqual(context.num_continuous_states(), 2)\n self.assertEqual(context.num_discrete_state_groups(), 0)\n self.assertEqual(system.get_input_port(0).size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertEqual(context.num_abstract_parameters(), 0)\n self.assertEqual(context.num_numeric_parameter_groups(), 0)\n self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(x[0] + x[1])\n )\n self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))\n\n def test_symbolic_vector_system_parameters(self):\n t = Variable('t')\n x = [Variable('x0'), Variable('x1')]\n u = [Variable('u0'), Variable('u1')]\n p = [Variable('p0'), Variable('p1')]\n system = SymbolicVectorSystem(time=t, state=x, input=u, parameter=p,\n dynamics=[p[0] * x[0] + x[1] + p[1], t], output=[u[1]],\n time_period=0.0)\n context = system.CreateDefaultContext()\n self.assertEqual(context.num_continuous_states(), 2)\n self.assertEqual(context.num_discrete_state_groups(), 0)\n self.assertEqual(system.get_input_port(0).size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertEqual(context.num_abstract_parameters(), 0)\n self.assertEqual(context.num_numeric_parameter_groups(), 1)\n self.assertEqual(context.get_numeric_parameter(0).size(), 2)\n self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(p[0] * x\n [0] + x[1] + p[1]))\n self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))\n\n def test_wrap_to_system(self):\n system = WrapToSystem(2)\n system.set_interval(1, 1.0, 2.0)\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n system.get_input_port(0).FixValue(context, input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest((-1.5, 0.5), (-1.5, 1.5))\n mytest((0.2, 0.3), (0.2, 1.3))\n\n def test_demultiplexer(self):\n demux = Demultiplexer(size=4)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), 4)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(), [1, 1, 1, 1]\n )\n input_vec = np.array([1.0, 2.0, 3.0, 4.0])\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n for i in range(4):\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[i]))\n demux = Demultiplexer(size=4, output_ports_size=2)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), 2)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(), [2, 2])\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n for i in range(2):\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[2 * i:2 * i + 2]))\n output_ports_sizes = np.array([1, 2, 1])\n num_output_ports = output_ports_sizes.size\n input_vec = np.array([1.0, 2.0, 3.0, 4.0])\n demux = Demultiplexer(output_ports_sizes=output_ports_sizes)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), num_output_ports)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(),\n output_ports_sizes)\n demux.get_input_port(0).FixValue(context, input_vec)\n output = 
demux.AllocateOutput()\n demux.CalcOutput(context, output)\n output_port_start = 0\n for i in range(num_output_ports):\n output_port_size = output.get_vector_data(i).size()\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[output_port_start:output_port_start +\n output_port_size]))\n output_port_start += output_port_size\n <mask token>\n\n def test_multilayer_perceptron(self):\n mlp = MultilayerPerceptron(layers=[1, 2, 3], activation_type=\n PerceptronActivationType.kReLU)\n self.assertEqual(mlp.get_input_port().size(), 1)\n self.assertEqual(mlp.get_output_port().size(), 3)\n context = mlp.CreateDefaultContext()\n params = np.zeros((mlp.num_parameters(), 1))\n self.assertEqual(mlp.num_parameters(), 13)\n self.assertEqual(mlp.layers(), [1, 2, 3])\n self.assertEqual(mlp.activation_type(layer=0),\n PerceptronActivationType.kReLU)\n self.assertEqual(len(mlp.GetParameters(context=context)), mlp.\n num_parameters())\n mlp.SetWeights(context=context, layer=0, W=np.array([[1], [2]]))\n mlp.SetBiases(context=context, layer=0, b=[3, 4])\n np.testing.assert_array_equal(mlp.GetWeights(context=context, layer\n =0), np.array([[1], [2]]))\n np.testing.assert_array_equal(mlp.GetBiases(context=context, layer=\n 0), np.array([3, 4]))\n params = np.zeros(mlp.num_parameters())\n mlp.SetWeights(params=params, layer=0, W=np.array([[1], [2]]))\n mlp.SetBiases(params=params, layer=0, b=[3, 4])\n np.testing.assert_array_equal(mlp.GetWeights(params=params, layer=0\n ), np.array([[1], [2]]))\n np.testing.assert_array_equal(mlp.GetBiases(params=params, layer=0),\n np.array([3, 4]))\n mutable_params = mlp.GetMutableParameters(context=context)\n mutable_params[:] = 3.0\n np.testing.assert_array_equal(mlp.GetParameters(context), np.full(\n mlp.num_parameters(), 3.0))\n global called_loss\n called_loss = False\n\n def silly_loss(Y, dloss_dY):\n global called_loss\n called_loss = True\n dloss_dY[:] = 1\n return Y.sum()\n dloss_dparams = np.zeros((13,))\n generator = RandomGenerator(23)\n mlp.SetRandomContext(context, generator)\n mlp.Backpropagation(context=context, X=np.array([1, 3, 4]).reshape(\n (1, 3)), loss=silly_loss, dloss_dparams=dloss_dparams)\n self.assertTrue(called_loss)\n self.assertTrue(dloss_dparams.any())\n dloss_dparams = np.zeros((13,))\n mlp.BackpropagationMeanSquaredError(context=context, X=np.array([1,\n 3, 4]).reshape((1, 3)), Y_desired=np.eye(3), dloss_dparams=\n dloss_dparams)\n self.assertTrue(dloss_dparams.any())\n Y = np.asfortranarray(np.eye(3))\n mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]), Y=Y)\n self.assertFalse(np.allclose(Y, np.eye(3)))\n Y2 = mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]))\n np.testing.assert_array_equal(Y, Y2)\n mlp2 = MultilayerPerceptron(layers=[3, 2, 1], activation_types=[\n PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])\n self.assertEqual(mlp2.activation_type(0), PerceptronActivationType.\n kReLU)\n self.assertEqual(mlp2.activation_type(1), PerceptronActivationType.\n kTanh)\n Y = np.asfortranarray(np.full((1, 3), 2.4))\n dYdX = np.asfortranarray(np.full((3, 3), 5.3))\n context2 = mlp2.CreateDefaultContext()\n mlp2.BatchOutput(context=context2, X=np.eye(3), Y=Y, dYdX=dYdX)\n np.testing.assert_array_almost_equal(Y, np.zeros((1, 3)))\n np.testing.assert_array_almost_equal(dYdX, np.zeros((3, 3)))\n mlp = MultilayerPerceptron(use_sin_cos_for_input=[True, False],\n remaining_layers=[3, 2], activation_types=[\n PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])\n 
self.assertEqual(mlp.get_input_port().size(), 2)\n np.testing.assert_array_equal(mlp.layers(), [3, 3, 2])\n\n def test_random_source(self):\n source = RandomSource(distribution=RandomDistribution.kUniform,\n num_outputs=2, sampling_interval_sec=0.01)\n self.assertEqual(source.get_output_port(0).size(), 2)\n builder = DiagramBuilder()\n AddRandomInputs(sampling_interval_sec=0.01, builder=builder)\n builder_ad = DiagramBuilder_[AutoDiffXd]()\n AddRandomInputs(sampling_interval_sec=0.01, builder=builder_ad)\n\n def test_constant_vector_source(self):\n source = ConstantVectorSource(source_value=[1.0, 2.0])\n context = source.CreateDefaultContext()\n source.get_source_value(context)\n source.get_mutable_source_value(context)\n <mask token>\n\n def test_shared_pointer_system_ctor(self):\n dut = SharedPointerSystem(value_to_hold=[1, 2, 3])\n readback = dut.get()\n self.assertListEqual(readback, [1, 2, 3])\n del dut\n self.assertListEqual(readback, [1, 2, 3])\n <mask token>\n\n def test_sine(self):\n sine_source = Sine(amplitude=1, frequency=2, phase=3, size=1,\n is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 1)\n self.assertEqual(sine_source.get_output_port(1).size(), 1)\n self.assertEqual(sine_source.get_output_port(2).size(), 1)\n sine_source = Sine(amplitude=1, frequency=2, phase=3, size=3,\n is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 3)\n self.assertEqual(sine_source.get_output_port(1).size(), 3)\n self.assertEqual(sine_source.get_output_port(2).size(), 3)\n sine_source = Sine(amplitudes=np.ones(2), frequencies=np.ones(2),\n phases=np.ones(2), is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 2)\n self.assertEqual(sine_source.get_output_port(1).size(), 2)\n self.assertEqual(sine_source.get_output_port(2).size(), 2)\n\n def test_discrete_derivative(self):\n discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=0.5)\n self.assertEqual(discrete_derivative.get_input_port(0).size(), 5)\n self.assertEqual(discrete_derivative.get_output_port(0).size(), 5)\n self.assertEqual(discrete_derivative.time_step(), 0.5)\n self.assertTrue(discrete_derivative.suppress_initial_transient())\n discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=\n 0.5, suppress_initial_transient=False)\n self.assertFalse(discrete_derivative.suppress_initial_transient())\n <mask token>\n\n @numpy_compare.check_nonsymbolic_types\n def test_log_vector_output(self, T):\n builder = DiagramBuilder_[T]()\n kSize = 1\n integrator = builder.AddSystem(Integrator_[T](kSize))\n port = integrator.get_output_port(0)\n loggers = []\n loggers.append(LogVectorOutput(port, builder))\n loggers.append(LogVectorOutput(src=port, builder=builder))\n loggers.append(LogVectorOutput(port, builder, 0.125))\n loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_period=0.125))\n loggers.append(LogVectorOutput(port, builder, {TriggerType.kForced}))\n loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_triggers={TriggerType.kForced}))\n loggers.append(LogVectorOutput(port, builder, {TriggerType.\n kPeriodic}, 0.125))\n loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_triggers={TriggerType.kPeriodic}, publish_period=0.125))\n diagram = builder.Build()\n context = diagram.CreateDefaultContext()\n self.assertTrue(all(logger.FindLog(context).num_samples() == 0 for\n logger in loggers))\n <mask token>\n\n @numpy_compare.check_nonsymbolic_types\n def test_vector_log_sink(self, T):\n builder = 
DiagramBuilder_[T]()\n kSize = 1\n constructors = [VectorLogSink_[T]]\n loggers = []\n if T == float:\n constructors.append(VectorLogSink)\n for constructor in constructors:\n loggers.append(builder.AddSystem(constructor(kSize)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize)))\n loggers.append(builder.AddSystem(constructor(kSize, 0.125)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_period=0.125)))\n loggers.append(builder.AddSystem(constructor(kSize, {\n TriggerType.kForced})))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_triggers={TriggerType.kForced})))\n loggers.append(builder.AddSystem(constructor(kSize, {\n TriggerType.kPeriodic}, 0.125)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_triggers={TriggerType.kPeriodic}, publish_period=\n 0.125)))\n diagram = builder.Build()\n context = diagram.CreateDefaultContext()\n self.assertTrue(all(logger.FindLog(context) == logger.\n FindMutableLog(context) for logger in loggers))\n loggers_and_contexts = [(x, x.GetMyContextFromRoot(context)) for x in\n loggers]\n self.assertTrue(all(logger.GetLog(logger_context) == logger.\n GetMutableLog(logger_context) for logger, logger_context in\n loggers_and_contexts))\n self.assertTrue(all(logger.GetLog(logger_context) == logger.FindLog\n (context) for logger, logger_context in loggers_and_contexts))\n",
"step-2": "<mask token>\n\n\nclass TestGeneral(unittest.TestCase):\n\n def _check_instantiations(self, template, supports_symbolic=True):\n default_cls = template[None]\n self.assertTrue(template[float] is default_cls)\n self.assertTrue(template[AutoDiffXd] is not default_cls)\n if supports_symbolic:\n self.assertTrue(template[Expression] is not default_cls)\n\n def test_instantiations(self):\n self._check_instantiations(Adder_)\n self._check_instantiations(AffineSystem_)\n self._check_instantiations(ConstantValueSource_)\n self._check_instantiations(ConstantVectorSource_)\n self._check_instantiations(Demultiplexer_)\n self._check_instantiations(DiscreteDerivative_)\n self._check_instantiations(DiscreteTimeDelay_)\n self._check_instantiations(Gain_)\n self._check_instantiations(Integrator_)\n self._check_instantiations(LinearSystem_)\n self._check_instantiations(LinearTransformDensity_,\n supports_symbolic=False)\n self._check_instantiations(Multiplexer_)\n self._check_instantiations(MultilayerPerceptron_)\n self._check_instantiations(PassThrough_)\n self._check_instantiations(PortSwitch_)\n self._check_instantiations(Saturation_)\n self._check_instantiations(SharedPointerSystem_)\n self._check_instantiations(Sine_)\n self._check_instantiations(StateInterpolatorWithDiscreteDerivative_)\n self._check_instantiations(SymbolicVectorSystem_)\n self._check_instantiations(TrajectoryAffineSystem_,\n supports_symbolic=False)\n self._check_instantiations(TrajectoryLinearSystem_,\n supports_symbolic=False)\n self._check_instantiations(TrajectorySource_)\n self._check_instantiations(VectorLogSink_)\n self._check_instantiations(WrapToSystem_)\n self._check_instantiations(ZeroOrderHold_)\n <mask token>\n\n def test_linear_affine_system_empty_matrices(self):\n\n def CheckSizes(system, num_states, num_inputs, num_outputs):\n self.assertEqual(system.num_continuous_states(), num_states)\n self.assertEqual(system.num_inputs(), num_inputs)\n self.assertEqual(system.num_outputs(), num_outputs)\n system = AffineSystem(y0=[2, 1])\n CheckSizes(system, num_states=0, num_inputs=0, num_outputs=2)\n system = AffineSystem(D=np.eye(2))\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = LinearSystem(D=np.eye(2))\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = AffineSystem(D=np.eye(2), y0=[1, 2])\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = LinearSystem(B=np.eye(2))\n CheckSizes(system, num_states=2, num_inputs=2, num_outputs=0)\n\n def test_linear_system_zero_size(self):\n num_x = 0\n num_y = 2\n num_u = 2\n A = np.zeros((num_x, num_x))\n B = np.zeros((num_x, num_u))\n C = np.zeros((num_y, num_x))\n D = np.zeros((num_y, num_u))\n self.assertIsNotNone(LinearSystem(A, B, C, D))\n\n @numpy_compare.check_nonsymbolic_types\n def test_linear_transform_density(self, T):\n dut = LinearTransformDensity_[T](distribution=RandomDistribution.\n kGaussian, input_size=3, output_size=3)\n w_in = np.array([T(0.5), T(0.1), T(1.5)])\n context = dut.CreateDefaultContext()\n dut.get_input_port_w_in().FixValue(context, w_in)\n self.assertEqual(dut.get_input_port_A().size(), 9)\n self.assertEqual(dut.get_input_port_b().size(), 3)\n self.assertEqual(dut.get_distribution(), RandomDistribution.kGaussian)\n A = np.array([[T(0.5), T(1), T(2)], [T(1), T(2), T(3)], [T(3), T(4),\n T(5)]])\n dut.FixConstantA(context=context, A=A)\n b = np.array([T(1), T(2), T(3)])\n dut.FixConstantB(context=context, b=b)\n dut.CalcDensity(context=context)\n 
self.assertEqual(dut.get_output_port_w_out().size(), 3)\n self.assertEqual(dut.get_output_port_w_out_density().size(), 1)\n\n def test_vector_pass_through(self):\n model_value = BasicVector([1.0, 2, 3])\n system = PassThrough(vector_size=model_value.size())\n context = system.CreateDefaultContext()\n system.get_input_port(0).FixValue(context, model_value)\n output = system.AllocateOutput()\n input_eval = system.EvalVectorInput(context, 0)\n compare_value(self, input_eval, model_value)\n system.CalcOutput(context, output)\n output_value = output.get_vector_data(0)\n compare_value(self, output_value, model_value)\n\n def test_default_vector_pass_through(self):\n model_value = [1.0, 2, 3]\n system = PassThrough(value=model_value)\n context = system.CreateDefaultContext()\n np.testing.assert_array_equal(model_value, system.get_output_port()\n .Eval(context))\n\n def test_abstract_pass_through(self):\n model_value = Value('Hello world')\n system = PassThrough(abstract_model_value=model_value)\n context = system.CreateDefaultContext()\n system.get_input_port(0).FixValue(context, model_value)\n output = system.AllocateOutput()\n input_eval = system.EvalAbstractInput(context, 0)\n compare_value(self, input_eval, model_value)\n system.CalcOutput(context, output)\n output_value = output.get_data(0)\n compare_value(self, output_value, model_value)\n\n def test_port_switch(self):\n system = PortSwitch(vector_size=2)\n a = system.DeclareInputPort(name='a')\n system.DeclareInputPort(name='b')\n context = system.CreateDefaultContext()\n self.assertIsInstance(a, InputPort)\n system.get_port_selector_input_port().FixValue(context, a.get_index())\n\n def test_first_order_low_pass_filter(self):\n filter1 = FirstOrderLowPassFilter(time_constant=3.0, size=4)\n self.assertEqual(filter1.get_time_constant(), 3.0)\n alpha = np.array([1, 2, 3])\n filter2 = FirstOrderLowPassFilter(time_constants=alpha)\n np.testing.assert_array_equal(filter2.get_time_constants_vector(),\n alpha)\n context = filter2.CreateDefaultContext()\n filter2.set_initial_output_value(context, [0.0, -0.2, 0.4])\n <mask token>\n\n def test_saturation(self):\n system = Saturation((0.0, -1.0, 3.0), (1.0, 2.0, 4.0))\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n system.get_input_port(0).FixValue(context, input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest((-5.0, 5.0, 4.0), (0.0, 2.0, 4.0))\n mytest((0.4, 0.0, 3.5), (0.4, 0.0, 3.5))\n\n def test_trajectory_source(self):\n ppt = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[2.0, 3.0], [\n 2.0, 1.0]])\n system = TrajectorySource(trajectory=ppt, output_derivative_order=0,\n zero_derivatives_beyond_limits=True)\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n context.SetTime(input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest(0.0, (2.0, 2.0))\n mytest(0.5, (2.5, 1.5))\n mytest(1.0, (3.0, 1.0))\n ppt2 = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[4.0, 6.0],\n [4.0, 2.0]])\n system.UpdateTrajectory(trajectory=ppt2)\n mytest(0.0, (4.0, 4.0))\n mytest(0.5, (5.0, 3.0))\n mytest(1.0, (6.0, 2.0))\n\n def test_symbolic_vector_system(self):\n t = Variable('t')\n x = [Variable('x0'), Variable('x1')]\n u = [Variable('u0'), Variable('u1')]\n system = SymbolicVectorSystem(time=t, state=x, input=u, dynamics=[x\n 
[0] + x[1], t], output=[u[1]], time_period=0.0)\n context = system.CreateDefaultContext()\n self.assertEqual(context.num_continuous_states(), 2)\n self.assertEqual(context.num_discrete_state_groups(), 0)\n self.assertEqual(system.get_input_port(0).size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertEqual(context.num_abstract_parameters(), 0)\n self.assertEqual(context.num_numeric_parameter_groups(), 0)\n self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(x[0] + x[1])\n )\n self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))\n\n def test_symbolic_vector_system_parameters(self):\n t = Variable('t')\n x = [Variable('x0'), Variable('x1')]\n u = [Variable('u0'), Variable('u1')]\n p = [Variable('p0'), Variable('p1')]\n system = SymbolicVectorSystem(time=t, state=x, input=u, parameter=p,\n dynamics=[p[0] * x[0] + x[1] + p[1], t], output=[u[1]],\n time_period=0.0)\n context = system.CreateDefaultContext()\n self.assertEqual(context.num_continuous_states(), 2)\n self.assertEqual(context.num_discrete_state_groups(), 0)\n self.assertEqual(system.get_input_port(0).size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertEqual(context.num_abstract_parameters(), 0)\n self.assertEqual(context.num_numeric_parameter_groups(), 1)\n self.assertEqual(context.get_numeric_parameter(0).size(), 2)\n self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(p[0] * x\n [0] + x[1] + p[1]))\n self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))\n\n def test_wrap_to_system(self):\n system = WrapToSystem(2)\n system.set_interval(1, 1.0, 2.0)\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n system.get_input_port(0).FixValue(context, input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest((-1.5, 0.5), (-1.5, 1.5))\n mytest((0.2, 0.3), (0.2, 1.3))\n\n def test_demultiplexer(self):\n demux = Demultiplexer(size=4)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), 4)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(), [1, 1, 1, 1]\n )\n input_vec = np.array([1.0, 2.0, 3.0, 4.0])\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n for i in range(4):\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[i]))\n demux = Demultiplexer(size=4, output_ports_size=2)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), 2)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(), [2, 2])\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n for i in range(2):\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[2 * i:2 * i + 2]))\n output_ports_sizes = np.array([1, 2, 1])\n num_output_ports = output_ports_sizes.size\n input_vec = np.array([1.0, 2.0, 3.0, 4.0])\n demux = Demultiplexer(output_ports_sizes=output_ports_sizes)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), num_output_ports)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(),\n output_ports_sizes)\n demux.get_input_port(0).FixValue(context, input_vec)\n output = 
demux.AllocateOutput()\n demux.CalcOutput(context, output)\n output_port_start = 0\n for i in range(num_output_ports):\n output_port_size = output.get_vector_data(i).size()\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[output_port_start:output_port_start +\n output_port_size]))\n output_port_start += output_port_size\n <mask token>\n\n def test_multilayer_perceptron(self):\n mlp = MultilayerPerceptron(layers=[1, 2, 3], activation_type=\n PerceptronActivationType.kReLU)\n self.assertEqual(mlp.get_input_port().size(), 1)\n self.assertEqual(mlp.get_output_port().size(), 3)\n context = mlp.CreateDefaultContext()\n params = np.zeros((mlp.num_parameters(), 1))\n self.assertEqual(mlp.num_parameters(), 13)\n self.assertEqual(mlp.layers(), [1, 2, 3])\n self.assertEqual(mlp.activation_type(layer=0),\n PerceptronActivationType.kReLU)\n self.assertEqual(len(mlp.GetParameters(context=context)), mlp.\n num_parameters())\n mlp.SetWeights(context=context, layer=0, W=np.array([[1], [2]]))\n mlp.SetBiases(context=context, layer=0, b=[3, 4])\n np.testing.assert_array_equal(mlp.GetWeights(context=context, layer\n =0), np.array([[1], [2]]))\n np.testing.assert_array_equal(mlp.GetBiases(context=context, layer=\n 0), np.array([3, 4]))\n params = np.zeros(mlp.num_parameters())\n mlp.SetWeights(params=params, layer=0, W=np.array([[1], [2]]))\n mlp.SetBiases(params=params, layer=0, b=[3, 4])\n np.testing.assert_array_equal(mlp.GetWeights(params=params, layer=0\n ), np.array([[1], [2]]))\n np.testing.assert_array_equal(mlp.GetBiases(params=params, layer=0),\n np.array([3, 4]))\n mutable_params = mlp.GetMutableParameters(context=context)\n mutable_params[:] = 3.0\n np.testing.assert_array_equal(mlp.GetParameters(context), np.full(\n mlp.num_parameters(), 3.0))\n global called_loss\n called_loss = False\n\n def silly_loss(Y, dloss_dY):\n global called_loss\n called_loss = True\n dloss_dY[:] = 1\n return Y.sum()\n dloss_dparams = np.zeros((13,))\n generator = RandomGenerator(23)\n mlp.SetRandomContext(context, generator)\n mlp.Backpropagation(context=context, X=np.array([1, 3, 4]).reshape(\n (1, 3)), loss=silly_loss, dloss_dparams=dloss_dparams)\n self.assertTrue(called_loss)\n self.assertTrue(dloss_dparams.any())\n dloss_dparams = np.zeros((13,))\n mlp.BackpropagationMeanSquaredError(context=context, X=np.array([1,\n 3, 4]).reshape((1, 3)), Y_desired=np.eye(3), dloss_dparams=\n dloss_dparams)\n self.assertTrue(dloss_dparams.any())\n Y = np.asfortranarray(np.eye(3))\n mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]), Y=Y)\n self.assertFalse(np.allclose(Y, np.eye(3)))\n Y2 = mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]))\n np.testing.assert_array_equal(Y, Y2)\n mlp2 = MultilayerPerceptron(layers=[3, 2, 1], activation_types=[\n PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])\n self.assertEqual(mlp2.activation_type(0), PerceptronActivationType.\n kReLU)\n self.assertEqual(mlp2.activation_type(1), PerceptronActivationType.\n kTanh)\n Y = np.asfortranarray(np.full((1, 3), 2.4))\n dYdX = np.asfortranarray(np.full((3, 3), 5.3))\n context2 = mlp2.CreateDefaultContext()\n mlp2.BatchOutput(context=context2, X=np.eye(3), Y=Y, dYdX=dYdX)\n np.testing.assert_array_almost_equal(Y, np.zeros((1, 3)))\n np.testing.assert_array_almost_equal(dYdX, np.zeros((3, 3)))\n mlp = MultilayerPerceptron(use_sin_cos_for_input=[True, False],\n remaining_layers=[3, 2], activation_types=[\n PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])\n 
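# layers() below reports the sin/cos expansion: the nominal 2-dim\n # input becomes a 3-wide feature layer, hence [3, 3, 2].\n 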
self.assertEqual(mlp.get_input_port().size(), 2)\n np.testing.assert_array_equal(mlp.layers(), [3, 3, 2])\n\n def test_random_source(self):\n source = RandomSource(distribution=RandomDistribution.kUniform,\n num_outputs=2, sampling_interval_sec=0.01)\n self.assertEqual(source.get_output_port(0).size(), 2)\n builder = DiagramBuilder()\n AddRandomInputs(sampling_interval_sec=0.01, builder=builder)\n builder_ad = DiagramBuilder_[AutoDiffXd]()\n AddRandomInputs(sampling_interval_sec=0.01, builder=builder_ad)\n\n def test_constant_vector_source(self):\n source = ConstantVectorSource(source_value=[1.0, 2.0])\n context = source.CreateDefaultContext()\n source.get_source_value(context)\n source.get_mutable_source_value(context)\n <mask token>\n\n def test_shared_pointer_system_ctor(self):\n dut = SharedPointerSystem(value_to_hold=[1, 2, 3])\n readback = dut.get()\n self.assertListEqual(readback, [1, 2, 3])\n del dut\n self.assertListEqual(readback, [1, 2, 3])\n\n def test_shared_pointer_system_builder(self):\n builder = DiagramBuilder()\n self.assertListEqual(SharedPointerSystem.AddToBuilder(builder=\n builder, value_to_hold=[1, 2, 3]), [1, 2, 3])\n diagram = builder.Build()\n del builder\n readback = diagram.GetSystems()[0].get()\n self.assertListEqual(readback, [1, 2, 3])\n del diagram\n self.assertListEqual(readback, [1, 2, 3])\n\n def test_sine(self):\n sine_source = Sine(amplitude=1, frequency=2, phase=3, size=1,\n is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 1)\n self.assertEqual(sine_source.get_output_port(1).size(), 1)\n self.assertEqual(sine_source.get_output_port(2).size(), 1)\n sine_source = Sine(amplitude=1, frequency=2, phase=3, size=3,\n is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 3)\n self.assertEqual(sine_source.get_output_port(1).size(), 3)\n self.assertEqual(sine_source.get_output_port(2).size(), 3)\n sine_source = Sine(amplitudes=np.ones(2), frequencies=np.ones(2),\n phases=np.ones(2), is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 2)\n self.assertEqual(sine_source.get_output_port(1).size(), 2)\n self.assertEqual(sine_source.get_output_port(2).size(), 2)\n\n def test_discrete_derivative(self):\n discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=0.5)\n self.assertEqual(discrete_derivative.get_input_port(0).size(), 5)\n self.assertEqual(discrete_derivative.get_output_port(0).size(), 5)\n self.assertEqual(discrete_derivative.time_step(), 0.5)\n self.assertTrue(discrete_derivative.suppress_initial_transient())\n discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=\n 0.5, suppress_initial_transient=False)\n self.assertFalse(discrete_derivative.suppress_initial_transient())\n <mask token>\n\n @numpy_compare.check_nonsymbolic_types\n def test_log_vector_output(self, T):\n builder = DiagramBuilder_[T]()\n kSize = 1\n integrator = builder.AddSystem(Integrator_[T](kSize))\n port = integrator.get_output_port(0)\n loggers = []\n loggers.append(LogVectorOutput(port, builder))\n loggers.append(LogVectorOutput(src=port, builder=builder))\n loggers.append(LogVectorOutput(port, builder, 0.125))\n loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_period=0.125))\n loggers.append(LogVectorOutput(port, builder, {TriggerType.kForced}))\n loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_triggers={TriggerType.kForced}))\n loggers.append(LogVectorOutput(port, builder, {TriggerType.\n kPeriodic}, 0.125))\n 
loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_triggers={TriggerType.kPeriodic}, publish_period=0.125))\n diagram = builder.Build()\n context = diagram.CreateDefaultContext()\n self.assertTrue(all(logger.FindLog(context).num_samples() == 0 for\n logger in loggers))\n <mask token>\n\n @numpy_compare.check_nonsymbolic_types\n def test_vector_log_sink(self, T):\n builder = DiagramBuilder_[T]()\n kSize = 1\n constructors = [VectorLogSink_[T]]\n loggers = []\n if T == float:\n constructors.append(VectorLogSink)\n for constructor in constructors:\n loggers.append(builder.AddSystem(constructor(kSize)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize)))\n loggers.append(builder.AddSystem(constructor(kSize, 0.125)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_period=0.125)))\n loggers.append(builder.AddSystem(constructor(kSize, {\n TriggerType.kForced})))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_triggers={TriggerType.kForced})))\n loggers.append(builder.AddSystem(constructor(kSize, {\n TriggerType.kPeriodic}, 0.125)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_triggers={TriggerType.kPeriodic}, publish_period=\n 0.125)))\n diagram = builder.Build()\n context = diagram.CreateDefaultContext()\n self.assertTrue(all(logger.FindLog(context) == logger.\n FindMutableLog(context) for logger in loggers))\n loggers_and_contexts = [(x, x.GetMyContextFromRoot(context)) for x in\n loggers]\n self.assertTrue(all(logger.GetLog(logger_context) == logger.\n GetMutableLog(logger_context) for logger, logger_context in\n loggers_and_contexts))\n self.assertTrue(all(logger.GetLog(logger_context) == logger.FindLog\n (context) for logger, logger_context in loggers_and_contexts))\n",
"step-3": "<mask token>\n\n\nclass TestGeneral(unittest.TestCase):\n\n def _check_instantiations(self, template, supports_symbolic=True):\n default_cls = template[None]\n self.assertTrue(template[float] is default_cls)\n self.assertTrue(template[AutoDiffXd] is not default_cls)\n if supports_symbolic:\n self.assertTrue(template[Expression] is not default_cls)\n\n def test_instantiations(self):\n self._check_instantiations(Adder_)\n self._check_instantiations(AffineSystem_)\n self._check_instantiations(ConstantValueSource_)\n self._check_instantiations(ConstantVectorSource_)\n self._check_instantiations(Demultiplexer_)\n self._check_instantiations(DiscreteDerivative_)\n self._check_instantiations(DiscreteTimeDelay_)\n self._check_instantiations(Gain_)\n self._check_instantiations(Integrator_)\n self._check_instantiations(LinearSystem_)\n self._check_instantiations(LinearTransformDensity_,\n supports_symbolic=False)\n self._check_instantiations(Multiplexer_)\n self._check_instantiations(MultilayerPerceptron_)\n self._check_instantiations(PassThrough_)\n self._check_instantiations(PortSwitch_)\n self._check_instantiations(Saturation_)\n self._check_instantiations(SharedPointerSystem_)\n self._check_instantiations(Sine_)\n self._check_instantiations(StateInterpolatorWithDiscreteDerivative_)\n self._check_instantiations(SymbolicVectorSystem_)\n self._check_instantiations(TrajectoryAffineSystem_,\n supports_symbolic=False)\n self._check_instantiations(TrajectoryLinearSystem_,\n supports_symbolic=False)\n self._check_instantiations(TrajectorySource_)\n self._check_instantiations(VectorLogSink_)\n self._check_instantiations(WrapToSystem_)\n self._check_instantiations(ZeroOrderHold_)\n\n def test_linear_affine_system(self):\n A = np.identity(2)\n B = np.array([[0], [1]])\n f0 = np.array([[0], [0]])\n C = np.array([[0, 1]])\n D = [1]\n y0 = [0]\n system = LinearSystem(A, B, C, D)\n context = system.CreateDefaultContext()\n self.assertEqual(system.get_input_port(0).size(), 1)\n self.assertEqual(context.get_mutable_continuous_state_vector().size\n (), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertTrue((system.A() == A).all())\n self.assertTrue((system.B() == B).all())\n self.assertTrue((system.f0() == f0).all())\n self.assertTrue((system.C() == C).all())\n self.assertEqual(system.D(), D)\n self.assertEqual(system.y0(), y0)\n self.assertEqual(system.time_period(), 0.0)\n x0 = np.array([1, 2])\n system.configure_default_state(x0=x0)\n system.SetDefaultContext(context)\n np.testing.assert_equal(context.get_continuous_state_vector().\n CopyToVector(), x0)\n generator = RandomGenerator()\n system.SetRandomContext(context, generator)\n np.testing.assert_equal(context.get_continuous_state_vector().\n CopyToVector(), x0)\n system.configure_random_state(covariance=np.eye(2))\n system.SetRandomContext(context, generator)\n self.assertNotEqual(context.get_continuous_state_vector().\n CopyToVector()[1], x0[1])\n Co = ControllabilityMatrix(system)\n self.assertEqual(Co.shape, (2, 2))\n self.assertFalse(IsControllable(system))\n self.assertFalse(IsControllable(system, 1e-06))\n self.assertFalse(IsStabilizable(sys=system))\n self.assertFalse(IsStabilizable(sys=system, threshold=1e-06))\n Ob = ObservabilityMatrix(system)\n self.assertEqual(Ob.shape, (2, 2))\n self.assertFalse(IsObservable(system))\n self.assertFalse(IsDetectable(sys=system))\n self.assertFalse(IsDetectable(sys=system, threshold=1e-06))\n system = AffineSystem(A, B, f0, C, D, y0, 0.1)\n 
self.assertEqual(system.get_input_port(0), system.get_input_port())\n self.assertEqual(system.get_output_port(0), system.get_output_port())\n context = system.CreateDefaultContext()\n self.assertEqual(system.get_input_port(0).size(), 1)\n self.assertEqual(context.get_discrete_state_vector().size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertTrue((system.A() == A).all())\n self.assertTrue((system.B() == B).all())\n self.assertTrue((system.f0() == f0).all())\n self.assertTrue((system.C() == C).all())\n self.assertEqual(system.D(), D)\n self.assertEqual(system.y0(), y0)\n self.assertEqual(system.time_period(), 0.1)\n system.get_input_port(0).FixValue(context, 0)\n linearized = Linearize(system, context)\n self.assertTrue((linearized.A() == A).all())\n taylor = FirstOrderTaylorApproximation(system, context)\n self.assertTrue((taylor.y0() == y0).all())\n new_A = np.array([[1, 2], [3, 4]])\n new_B = np.array([[5], [6]])\n new_f0 = np.array([[7], [8]])\n new_C = np.array([[9, 10]])\n new_D = np.array([[11]])\n new_y0 = np.array([12])\n system.UpdateCoefficients(A=new_A, B=new_B, f0=new_f0, C=new_C, D=\n new_D, y0=new_y0)\n np.testing.assert_equal(new_A, system.A())\n np.testing.assert_equal(new_B, system.B())\n np.testing.assert_equal(new_f0.flatten(), system.f0())\n np.testing.assert_equal(new_C, system.C())\n np.testing.assert_equal(new_D, system.D())\n np.testing.assert_equal(new_y0, system.y0())\n system = MatrixGain(D=A)\n self.assertTrue((system.D() == A).all())\n system = TrajectoryAffineSystem(PiecewisePolynomial(A),\n PiecewisePolynomial(B), PiecewisePolynomial(f0),\n PiecewisePolynomial(C), PiecewisePolynomial(D),\n PiecewisePolynomial(y0), 0.1)\n self.assertEqual(system.get_input_port(0), system.get_input_port())\n self.assertEqual(system.get_output_port(0), system.get_output_port())\n context = system.CreateDefaultContext()\n self.assertEqual(system.get_input_port(0).size(), 1)\n self.assertEqual(context.get_discrete_state_vector().size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n for t in np.linspace(0.0, 1.0, 5):\n self.assertTrue((system.A(t) == A).all())\n self.assertTrue((system.B(t) == B).all())\n self.assertTrue((system.f0(t) == f0).all())\n self.assertTrue((system.C(t) == C).all())\n self.assertEqual(system.D(t), D)\n self.assertEqual(system.y0(t), y0)\n self.assertEqual(system.time_period(), 0.1)\n x0 = np.array([1, 2])\n system.configure_default_state(x0=x0)\n system.SetDefaultContext(context)\n np.testing.assert_equal(context.get_discrete_state_vector().\n CopyToVector(), x0)\n generator = RandomGenerator()\n system.SetRandomContext(context, generator)\n np.testing.assert_equal(context.get_discrete_state_vector().\n CopyToVector(), x0)\n system.configure_random_state(covariance=np.eye(2))\n system.SetRandomContext(context, generator)\n self.assertNotEqual(context.get_discrete_state_vector().\n CopyToVector()[1], x0[1])\n system = TrajectoryLinearSystem(A=PiecewisePolynomial(A), B=\n PiecewisePolynomial(B), C=PiecewisePolynomial(C), D=\n PiecewisePolynomial(D), time_period=0.1)\n self.assertEqual(system.time_period(), 0.1)\n system.configure_default_state(x0=np.array([1, 2]))\n system.configure_random_state(covariance=np.eye(2))\n\n def test_linear_affine_system_empty_matrices(self):\n\n def CheckSizes(system, num_states, num_inputs, num_outputs):\n self.assertEqual(system.num_continuous_states(), num_states)\n self.assertEqual(system.num_inputs(), num_inputs)\n self.assertEqual(system.num_outputs(), num_outputs)\n system = 
AffineSystem(y0=[2, 1])\n CheckSizes(system, num_states=0, num_inputs=0, num_outputs=2)\n system = AffineSystem(D=np.eye(2))\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = LinearSystem(D=np.eye(2))\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = AffineSystem(D=np.eye(2), y0=[1, 2])\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = LinearSystem(B=np.eye(2))\n CheckSizes(system, num_states=2, num_inputs=2, num_outputs=0)\n\n def test_linear_system_zero_size(self):\n num_x = 0\n num_y = 2\n num_u = 2\n A = np.zeros((num_x, num_x))\n B = np.zeros((num_x, num_u))\n C = np.zeros((num_y, num_x))\n D = np.zeros((num_y, num_u))\n self.assertIsNotNone(LinearSystem(A, B, C, D))\n\n @numpy_compare.check_nonsymbolic_types\n def test_linear_transform_density(self, T):\n dut = LinearTransformDensity_[T](distribution=RandomDistribution.\n kGaussian, input_size=3, output_size=3)\n w_in = np.array([T(0.5), T(0.1), T(1.5)])\n context = dut.CreateDefaultContext()\n dut.get_input_port_w_in().FixValue(context, w_in)\n self.assertEqual(dut.get_input_port_A().size(), 9)\n self.assertEqual(dut.get_input_port_b().size(), 3)\n self.assertEqual(dut.get_distribution(), RandomDistribution.kGaussian)\n A = np.array([[T(0.5), T(1), T(2)], [T(1), T(2), T(3)], [T(3), T(4),\n T(5)]])\n dut.FixConstantA(context=context, A=A)\n b = np.array([T(1), T(2), T(3)])\n dut.FixConstantB(context=context, b=b)\n dut.CalcDensity(context=context)\n self.assertEqual(dut.get_output_port_w_out().size(), 3)\n self.assertEqual(dut.get_output_port_w_out_density().size(), 1)\n\n def test_vector_pass_through(self):\n model_value = BasicVector([1.0, 2, 3])\n system = PassThrough(vector_size=model_value.size())\n context = system.CreateDefaultContext()\n system.get_input_port(0).FixValue(context, model_value)\n output = system.AllocateOutput()\n input_eval = system.EvalVectorInput(context, 0)\n compare_value(self, input_eval, model_value)\n system.CalcOutput(context, output)\n output_value = output.get_vector_data(0)\n compare_value(self, output_value, model_value)\n\n def test_default_vector_pass_through(self):\n model_value = [1.0, 2, 3]\n system = PassThrough(value=model_value)\n context = system.CreateDefaultContext()\n np.testing.assert_array_equal(model_value, system.get_output_port()\n .Eval(context))\n\n def test_abstract_pass_through(self):\n model_value = Value('Hello world')\n system = PassThrough(abstract_model_value=model_value)\n context = system.CreateDefaultContext()\n system.get_input_port(0).FixValue(context, model_value)\n output = system.AllocateOutput()\n input_eval = system.EvalAbstractInput(context, 0)\n compare_value(self, input_eval, model_value)\n system.CalcOutput(context, output)\n output_value = output.get_data(0)\n compare_value(self, output_value, model_value)\n\n def test_port_switch(self):\n system = PortSwitch(vector_size=2)\n a = system.DeclareInputPort(name='a')\n system.DeclareInputPort(name='b')\n context = system.CreateDefaultContext()\n self.assertIsInstance(a, InputPort)\n system.get_port_selector_input_port().FixValue(context, a.get_index())\n\n def test_first_order_low_pass_filter(self):\n filter1 = FirstOrderLowPassFilter(time_constant=3.0, size=4)\n self.assertEqual(filter1.get_time_constant(), 3.0)\n alpha = np.array([1, 2, 3])\n filter2 = FirstOrderLowPassFilter(time_constants=alpha)\n np.testing.assert_array_equal(filter2.get_time_constants_vector(),\n alpha)\n context = filter2.CreateDefaultContext()\n 
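# set_initial_output_value writes the filter state into the context,\n # one value per channel.\n 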
filter2.set_initial_output_value(context, [0.0, -0.2, 0.4])\n <mask token>\n\n def test_saturation(self):\n system = Saturation((0.0, -1.0, 3.0), (1.0, 2.0, 4.0))\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n system.get_input_port(0).FixValue(context, input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest((-5.0, 5.0, 4.0), (0.0, 2.0, 4.0))\n mytest((0.4, 0.0, 3.5), (0.4, 0.0, 3.5))\n\n def test_trajectory_source(self):\n ppt = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[2.0, 3.0], [\n 2.0, 1.0]])\n system = TrajectorySource(trajectory=ppt, output_derivative_order=0,\n zero_derivatives_beyond_limits=True)\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n context.SetTime(input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest(0.0, (2.0, 2.0))\n mytest(0.5, (2.5, 1.5))\n mytest(1.0, (3.0, 1.0))\n ppt2 = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[4.0, 6.0],\n [4.0, 2.0]])\n system.UpdateTrajectory(trajectory=ppt2)\n mytest(0.0, (4.0, 4.0))\n mytest(0.5, (5.0, 3.0))\n mytest(1.0, (6.0, 2.0))\n\n def test_symbolic_vector_system(self):\n t = Variable('t')\n x = [Variable('x0'), Variable('x1')]\n u = [Variable('u0'), Variable('u1')]\n system = SymbolicVectorSystem(time=t, state=x, input=u, dynamics=[x\n [0] + x[1], t], output=[u[1]], time_period=0.0)\n context = system.CreateDefaultContext()\n self.assertEqual(context.num_continuous_states(), 2)\n self.assertEqual(context.num_discrete_state_groups(), 0)\n self.assertEqual(system.get_input_port(0).size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertEqual(context.num_abstract_parameters(), 0)\n self.assertEqual(context.num_numeric_parameter_groups(), 0)\n self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(x[0] + x[1])\n )\n self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))\n\n def test_symbolic_vector_system_parameters(self):\n t = Variable('t')\n x = [Variable('x0'), Variable('x1')]\n u = [Variable('u0'), Variable('u1')]\n p = [Variable('p0'), Variable('p1')]\n system = SymbolicVectorSystem(time=t, state=x, input=u, parameter=p,\n dynamics=[p[0] * x[0] + x[1] + p[1], t], output=[u[1]],\n time_period=0.0)\n context = system.CreateDefaultContext()\n self.assertEqual(context.num_continuous_states(), 2)\n self.assertEqual(context.num_discrete_state_groups(), 0)\n self.assertEqual(system.get_input_port(0).size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertEqual(context.num_abstract_parameters(), 0)\n self.assertEqual(context.num_numeric_parameter_groups(), 1)\n self.assertEqual(context.get_numeric_parameter(0).size(), 2)\n self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(p[0] * x\n [0] + x[1] + p[1]))\n self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))\n\n def test_wrap_to_system(self):\n system = WrapToSystem(2)\n system.set_interval(1, 1.0, 2.0)\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n system.get_input_port(0).FixValue(context, input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest((-1.5, 0.5), (-1.5, 1.5))\n mytest((0.2, 0.3), (0.2, 1.3))\n\n def test_demultiplexer(self):\n demux = 
Demultiplexer(size=4)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), 4)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(), [1, 1, 1, 1]\n )\n input_vec = np.array([1.0, 2.0, 3.0, 4.0])\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n for i in range(4):\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[i]))\n demux = Demultiplexer(size=4, output_ports_size=2)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), 2)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(), [2, 2])\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n for i in range(2):\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[2 * i:2 * i + 2]))\n output_ports_sizes = np.array([1, 2, 1])\n num_output_ports = output_ports_sizes.size\n input_vec = np.array([1.0, 2.0, 3.0, 4.0])\n demux = Demultiplexer(output_ports_sizes=output_ports_sizes)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), num_output_ports)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(),\n output_ports_sizes)\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n output_port_start = 0\n for i in range(num_output_ports):\n output_port_size = output.get_vector_data(i).size()\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[output_port_start:output_port_start +\n output_port_size]))\n output_port_start += output_port_size\n <mask token>\n\n def test_multilayer_perceptron(self):\n mlp = MultilayerPerceptron(layers=[1, 2, 3], activation_type=\n PerceptronActivationType.kReLU)\n self.assertEqual(mlp.get_input_port().size(), 1)\n self.assertEqual(mlp.get_output_port().size(), 3)\n context = mlp.CreateDefaultContext()\n params = np.zeros((mlp.num_parameters(), 1))\n self.assertEqual(mlp.num_parameters(), 13)\n self.assertEqual(mlp.layers(), [1, 2, 3])\n self.assertEqual(mlp.activation_type(layer=0),\n PerceptronActivationType.kReLU)\n self.assertEqual(len(mlp.GetParameters(context=context)), mlp.\n num_parameters())\n mlp.SetWeights(context=context, layer=0, W=np.array([[1], [2]]))\n mlp.SetBiases(context=context, layer=0, b=[3, 4])\n np.testing.assert_array_equal(mlp.GetWeights(context=context, layer\n =0), np.array([[1], [2]]))\n np.testing.assert_array_equal(mlp.GetBiases(context=context, layer=\n 0), np.array([3, 4]))\n params = np.zeros(mlp.num_parameters())\n mlp.SetWeights(params=params, layer=0, W=np.array([[1], [2]]))\n mlp.SetBiases(params=params, layer=0, b=[3, 4])\n np.testing.assert_array_equal(mlp.GetWeights(params=params, layer=0\n ), np.array([[1], [2]]))\n np.testing.assert_array_equal(mlp.GetBiases(params=params, layer=0),\n np.array([3, 4]))\n mutable_params = mlp.GetMutableParameters(context=context)\n mutable_params[:] = 3.0\n np.testing.assert_array_equal(mlp.GetParameters(context), np.full(\n mlp.num_parameters(), 3.0))\n global called_loss\n called_loss = False\n\n def silly_loss(Y, dloss_dY):\n global called_loss\n called_loss = True\n dloss_dY[:] = 1\n return Y.sum()\n dloss_dparams = np.zeros((13,))\n generator = RandomGenerator(23)\n 
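# Randomize the parameters first so the backpropagated gradients\n # asserted below are nonzero.\n 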
mlp.SetRandomContext(context, generator)\n mlp.Backpropagation(context=context, X=np.array([1, 3, 4]).reshape(\n (1, 3)), loss=silly_loss, dloss_dparams=dloss_dparams)\n self.assertTrue(called_loss)\n self.assertTrue(dloss_dparams.any())\n dloss_dparams = np.zeros((13,))\n mlp.BackpropagationMeanSquaredError(context=context, X=np.array([1,\n 3, 4]).reshape((1, 3)), Y_desired=np.eye(3), dloss_dparams=\n dloss_dparams)\n self.assertTrue(dloss_dparams.any())\n Y = np.asfortranarray(np.eye(3))\n mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]), Y=Y)\n self.assertFalse(np.allclose(Y, np.eye(3)))\n Y2 = mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]))\n np.testing.assert_array_equal(Y, Y2)\n mlp2 = MultilayerPerceptron(layers=[3, 2, 1], activation_types=[\n PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])\n self.assertEqual(mlp2.activation_type(0), PerceptronActivationType.\n kReLU)\n self.assertEqual(mlp2.activation_type(1), PerceptronActivationType.\n kTanh)\n Y = np.asfortranarray(np.full((1, 3), 2.4))\n dYdX = np.asfortranarray(np.full((3, 3), 5.3))\n context2 = mlp2.CreateDefaultContext()\n mlp2.BatchOutput(context=context2, X=np.eye(3), Y=Y, dYdX=dYdX)\n np.testing.assert_array_almost_equal(Y, np.zeros((1, 3)))\n np.testing.assert_array_almost_equal(dYdX, np.zeros((3, 3)))\n mlp = MultilayerPerceptron(use_sin_cos_for_input=[True, False],\n remaining_layers=[3, 2], activation_types=[\n PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])\n self.assertEqual(mlp.get_input_port().size(), 2)\n np.testing.assert_array_equal(mlp.layers(), [3, 3, 2])\n\n def test_random_source(self):\n source = RandomSource(distribution=RandomDistribution.kUniform,\n num_outputs=2, sampling_interval_sec=0.01)\n self.assertEqual(source.get_output_port(0).size(), 2)\n builder = DiagramBuilder()\n AddRandomInputs(sampling_interval_sec=0.01, builder=builder)\n builder_ad = DiagramBuilder_[AutoDiffXd]()\n AddRandomInputs(sampling_interval_sec=0.01, builder=builder_ad)\n\n def test_constant_vector_source(self):\n source = ConstantVectorSource(source_value=[1.0, 2.0])\n context = source.CreateDefaultContext()\n source.get_source_value(context)\n source.get_mutable_source_value(context)\n\n def test_ctor_api(self):\n \"\"\"Tests construction of systems for systems whose execution semantics\n are not tested above.\n \"\"\"\n ConstantValueSource(Value('Hello world'))\n DiscreteTimeDelay(update_sec=0.1, delay_time_steps=5, vector_size=2)\n DiscreteTimeDelay(update_sec=0.1, delay_time_steps=5,\n abstract_model_value=Value('Hello world'))\n with catch_drake_warnings(expected_count=2) as w:\n DiscreteTimeDelay(update_sec=0.1, delay_timesteps=5, vector_size=2)\n DiscreteTimeDelay(update_sec=0.1, delay_timesteps=5,\n abstract_model_value=Value('Hello world'))\n ZeroOrderHold(period_sec=0.1, offset_sec=0.0, vector_size=2)\n dut = ZeroOrderHold(period_sec=1.0, offset_sec=0.25,\n abstract_model_value=Value('Hello world'))\n self.assertEqual(dut.period(), 1.0)\n self.assertEqual(dut.offset(), 0.25)\n\n def test_shared_pointer_system_ctor(self):\n dut = SharedPointerSystem(value_to_hold=[1, 2, 3])\n readback = dut.get()\n self.assertListEqual(readback, [1, 2, 3])\n del dut\n self.assertListEqual(readback, [1, 2, 3])\n\n def test_shared_pointer_system_builder(self):\n builder = DiagramBuilder()\n self.assertListEqual(SharedPointerSystem.AddToBuilder(builder=\n builder, value_to_hold=[1, 2, 3]), [1, 2, 3])\n diagram = builder.Build()\n del builder\n readback = 
diagram.GetSystems()[0].get()\n self.assertListEqual(readback, [1, 2, 3])\n del diagram\n self.assertListEqual(readback, [1, 2, 3])\n\n def test_sine(self):\n sine_source = Sine(amplitude=1, frequency=2, phase=3, size=1,\n is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 1)\n self.assertEqual(sine_source.get_output_port(1).size(), 1)\n self.assertEqual(sine_source.get_output_port(2).size(), 1)\n sine_source = Sine(amplitude=1, frequency=2, phase=3, size=3,\n is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 3)\n self.assertEqual(sine_source.get_output_port(1).size(), 3)\n self.assertEqual(sine_source.get_output_port(2).size(), 3)\n sine_source = Sine(amplitudes=np.ones(2), frequencies=np.ones(2),\n phases=np.ones(2), is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 2)\n self.assertEqual(sine_source.get_output_port(1).size(), 2)\n self.assertEqual(sine_source.get_output_port(2).size(), 2)\n\n def test_discrete_derivative(self):\n discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=0.5)\n self.assertEqual(discrete_derivative.get_input_port(0).size(), 5)\n self.assertEqual(discrete_derivative.get_output_port(0).size(), 5)\n self.assertEqual(discrete_derivative.time_step(), 0.5)\n self.assertTrue(discrete_derivative.suppress_initial_transient())\n discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=\n 0.5, suppress_initial_transient=False)\n self.assertFalse(discrete_derivative.suppress_initial_transient())\n <mask token>\n\n @numpy_compare.check_nonsymbolic_types\n def test_log_vector_output(self, T):\n builder = DiagramBuilder_[T]()\n kSize = 1\n integrator = builder.AddSystem(Integrator_[T](kSize))\n port = integrator.get_output_port(0)\n loggers = []\n loggers.append(LogVectorOutput(port, builder))\n loggers.append(LogVectorOutput(src=port, builder=builder))\n loggers.append(LogVectorOutput(port, builder, 0.125))\n loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_period=0.125))\n loggers.append(LogVectorOutput(port, builder, {TriggerType.kForced}))\n loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_triggers={TriggerType.kForced}))\n loggers.append(LogVectorOutput(port, builder, {TriggerType.\n kPeriodic}, 0.125))\n loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_triggers={TriggerType.kPeriodic}, publish_period=0.125))\n diagram = builder.Build()\n context = diagram.CreateDefaultContext()\n self.assertTrue(all(logger.FindLog(context).num_samples() == 0 for\n logger in loggers))\n <mask token>\n\n @numpy_compare.check_nonsymbolic_types\n def test_vector_log_sink(self, T):\n builder = DiagramBuilder_[T]()\n kSize = 1\n constructors = [VectorLogSink_[T]]\n loggers = []\n if T == float:\n constructors.append(VectorLogSink)\n for constructor in constructors:\n loggers.append(builder.AddSystem(constructor(kSize)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize)))\n loggers.append(builder.AddSystem(constructor(kSize, 0.125)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_period=0.125)))\n loggers.append(builder.AddSystem(constructor(kSize, {\n TriggerType.kForced})))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_triggers={TriggerType.kForced})))\n loggers.append(builder.AddSystem(constructor(kSize, {\n TriggerType.kPeriodic}, 0.125)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_triggers={TriggerType.kPeriodic}, 
publish_period=\n 0.125)))\n diagram = builder.Build()\n context = diagram.CreateDefaultContext()\n self.assertTrue(all(logger.FindLog(context) == logger.\n FindMutableLog(context) for logger in loggers))\n loggers_and_contexts = [(x, x.GetMyContextFromRoot(context)) for x in\n loggers]\n self.assertTrue(all(logger.GetLog(logger_context) == logger.\n GetMutableLog(logger_context) for logger, logger_context in\n loggers_and_contexts))\n self.assertTrue(all(logger.GetLog(logger_context) == logger.FindLog\n (context) for logger, logger_context in loggers_and_contexts))\n",
"step-4": "<mask token>\n\n\nclass TestGeneral(unittest.TestCase):\n\n def _check_instantiations(self, template, supports_symbolic=True):\n default_cls = template[None]\n self.assertTrue(template[float] is default_cls)\n self.assertTrue(template[AutoDiffXd] is not default_cls)\n if supports_symbolic:\n self.assertTrue(template[Expression] is not default_cls)\n\n def test_instantiations(self):\n self._check_instantiations(Adder_)\n self._check_instantiations(AffineSystem_)\n self._check_instantiations(ConstantValueSource_)\n self._check_instantiations(ConstantVectorSource_)\n self._check_instantiations(Demultiplexer_)\n self._check_instantiations(DiscreteDerivative_)\n self._check_instantiations(DiscreteTimeDelay_)\n self._check_instantiations(Gain_)\n self._check_instantiations(Integrator_)\n self._check_instantiations(LinearSystem_)\n self._check_instantiations(LinearTransformDensity_,\n supports_symbolic=False)\n self._check_instantiations(Multiplexer_)\n self._check_instantiations(MultilayerPerceptron_)\n self._check_instantiations(PassThrough_)\n self._check_instantiations(PortSwitch_)\n self._check_instantiations(Saturation_)\n self._check_instantiations(SharedPointerSystem_)\n self._check_instantiations(Sine_)\n self._check_instantiations(StateInterpolatorWithDiscreteDerivative_)\n self._check_instantiations(SymbolicVectorSystem_)\n self._check_instantiations(TrajectoryAffineSystem_,\n supports_symbolic=False)\n self._check_instantiations(TrajectoryLinearSystem_,\n supports_symbolic=False)\n self._check_instantiations(TrajectorySource_)\n self._check_instantiations(VectorLogSink_)\n self._check_instantiations(WrapToSystem_)\n self._check_instantiations(ZeroOrderHold_)\n\n def test_linear_affine_system(self):\n A = np.identity(2)\n B = np.array([[0], [1]])\n f0 = np.array([[0], [0]])\n C = np.array([[0, 1]])\n D = [1]\n y0 = [0]\n system = LinearSystem(A, B, C, D)\n context = system.CreateDefaultContext()\n self.assertEqual(system.get_input_port(0).size(), 1)\n self.assertEqual(context.get_mutable_continuous_state_vector().size\n (), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertTrue((system.A() == A).all())\n self.assertTrue((system.B() == B).all())\n self.assertTrue((system.f0() == f0).all())\n self.assertTrue((system.C() == C).all())\n self.assertEqual(system.D(), D)\n self.assertEqual(system.y0(), y0)\n self.assertEqual(system.time_period(), 0.0)\n x0 = np.array([1, 2])\n system.configure_default_state(x0=x0)\n system.SetDefaultContext(context)\n np.testing.assert_equal(context.get_continuous_state_vector().\n CopyToVector(), x0)\n generator = RandomGenerator()\n system.SetRandomContext(context, generator)\n np.testing.assert_equal(context.get_continuous_state_vector().\n CopyToVector(), x0)\n system.configure_random_state(covariance=np.eye(2))\n system.SetRandomContext(context, generator)\n self.assertNotEqual(context.get_continuous_state_vector().\n CopyToVector()[1], x0[1])\n Co = ControllabilityMatrix(system)\n self.assertEqual(Co.shape, (2, 2))\n self.assertFalse(IsControllable(system))\n self.assertFalse(IsControllable(system, 1e-06))\n self.assertFalse(IsStabilizable(sys=system))\n self.assertFalse(IsStabilizable(sys=system, threshold=1e-06))\n Ob = ObservabilityMatrix(system)\n self.assertEqual(Ob.shape, (2, 2))\n self.assertFalse(IsObservable(system))\n self.assertFalse(IsDetectable(sys=system))\n self.assertFalse(IsDetectable(sys=system, threshold=1e-06))\n system = AffineSystem(A, B, f0, C, D, y0, 0.1)\n 
self.assertEqual(system.get_input_port(0), system.get_input_port())\n self.assertEqual(system.get_output_port(0), system.get_output_port())\n context = system.CreateDefaultContext()\n self.assertEqual(system.get_input_port(0).size(), 1)\n self.assertEqual(context.get_discrete_state_vector().size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertTrue((system.A() == A).all())\n self.assertTrue((system.B() == B).all())\n self.assertTrue((system.f0() == f0).all())\n self.assertTrue((system.C() == C).all())\n self.assertEqual(system.D(), D)\n self.assertEqual(system.y0(), y0)\n self.assertEqual(system.time_period(), 0.1)\n system.get_input_port(0).FixValue(context, 0)\n linearized = Linearize(system, context)\n self.assertTrue((linearized.A() == A).all())\n taylor = FirstOrderTaylorApproximation(system, context)\n self.assertTrue((taylor.y0() == y0).all())\n new_A = np.array([[1, 2], [3, 4]])\n new_B = np.array([[5], [6]])\n new_f0 = np.array([[7], [8]])\n new_C = np.array([[9, 10]])\n new_D = np.array([[11]])\n new_y0 = np.array([12])\n system.UpdateCoefficients(A=new_A, B=new_B, f0=new_f0, C=new_C, D=\n new_D, y0=new_y0)\n np.testing.assert_equal(new_A, system.A())\n np.testing.assert_equal(new_B, system.B())\n np.testing.assert_equal(new_f0.flatten(), system.f0())\n np.testing.assert_equal(new_C, system.C())\n np.testing.assert_equal(new_D, system.D())\n np.testing.assert_equal(new_y0, system.y0())\n system = MatrixGain(D=A)\n self.assertTrue((system.D() == A).all())\n system = TrajectoryAffineSystem(PiecewisePolynomial(A),\n PiecewisePolynomial(B), PiecewisePolynomial(f0),\n PiecewisePolynomial(C), PiecewisePolynomial(D),\n PiecewisePolynomial(y0), 0.1)\n self.assertEqual(system.get_input_port(0), system.get_input_port())\n self.assertEqual(system.get_output_port(0), system.get_output_port())\n context = system.CreateDefaultContext()\n self.assertEqual(system.get_input_port(0).size(), 1)\n self.assertEqual(context.get_discrete_state_vector().size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n for t in np.linspace(0.0, 1.0, 5):\n self.assertTrue((system.A(t) == A).all())\n self.assertTrue((system.B(t) == B).all())\n self.assertTrue((system.f0(t) == f0).all())\n self.assertTrue((system.C(t) == C).all())\n self.assertEqual(system.D(t), D)\n self.assertEqual(system.y0(t), y0)\n self.assertEqual(system.time_period(), 0.1)\n x0 = np.array([1, 2])\n system.configure_default_state(x0=x0)\n system.SetDefaultContext(context)\n np.testing.assert_equal(context.get_discrete_state_vector().\n CopyToVector(), x0)\n generator = RandomGenerator()\n system.SetRandomContext(context, generator)\n np.testing.assert_equal(context.get_discrete_state_vector().\n CopyToVector(), x0)\n system.configure_random_state(covariance=np.eye(2))\n system.SetRandomContext(context, generator)\n self.assertNotEqual(context.get_discrete_state_vector().\n CopyToVector()[1], x0[1])\n system = TrajectoryLinearSystem(A=PiecewisePolynomial(A), B=\n PiecewisePolynomial(B), C=PiecewisePolynomial(C), D=\n PiecewisePolynomial(D), time_period=0.1)\n self.assertEqual(system.time_period(), 0.1)\n system.configure_default_state(x0=np.array([1, 2]))\n system.configure_random_state(covariance=np.eye(2))\n\n def test_linear_affine_system_empty_matrices(self):\n\n def CheckSizes(system, num_states, num_inputs, num_outputs):\n self.assertEqual(system.num_continuous_states(), num_states)\n self.assertEqual(system.num_inputs(), num_inputs)\n self.assertEqual(system.num_outputs(), num_outputs)\n system = 
AffineSystem(y0=[2, 1])\n CheckSizes(system, num_states=0, num_inputs=0, num_outputs=2)\n system = AffineSystem(D=np.eye(2))\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = LinearSystem(D=np.eye(2))\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = AffineSystem(D=np.eye(2), y0=[1, 2])\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = LinearSystem(B=np.eye(2))\n CheckSizes(system, num_states=2, num_inputs=2, num_outputs=0)\n\n def test_linear_system_zero_size(self):\n num_x = 0\n num_y = 2\n num_u = 2\n A = np.zeros((num_x, num_x))\n B = np.zeros((num_x, num_u))\n C = np.zeros((num_y, num_x))\n D = np.zeros((num_y, num_u))\n self.assertIsNotNone(LinearSystem(A, B, C, D))\n\n @numpy_compare.check_nonsymbolic_types\n def test_linear_transform_density(self, T):\n dut = LinearTransformDensity_[T](distribution=RandomDistribution.\n kGaussian, input_size=3, output_size=3)\n w_in = np.array([T(0.5), T(0.1), T(1.5)])\n context = dut.CreateDefaultContext()\n dut.get_input_port_w_in().FixValue(context, w_in)\n self.assertEqual(dut.get_input_port_A().size(), 9)\n self.assertEqual(dut.get_input_port_b().size(), 3)\n self.assertEqual(dut.get_distribution(), RandomDistribution.kGaussian)\n A = np.array([[T(0.5), T(1), T(2)], [T(1), T(2), T(3)], [T(3), T(4),\n T(5)]])\n dut.FixConstantA(context=context, A=A)\n b = np.array([T(1), T(2), T(3)])\n dut.FixConstantB(context=context, b=b)\n dut.CalcDensity(context=context)\n self.assertEqual(dut.get_output_port_w_out().size(), 3)\n self.assertEqual(dut.get_output_port_w_out_density().size(), 1)\n\n def test_vector_pass_through(self):\n model_value = BasicVector([1.0, 2, 3])\n system = PassThrough(vector_size=model_value.size())\n context = system.CreateDefaultContext()\n system.get_input_port(0).FixValue(context, model_value)\n output = system.AllocateOutput()\n input_eval = system.EvalVectorInput(context, 0)\n compare_value(self, input_eval, model_value)\n system.CalcOutput(context, output)\n output_value = output.get_vector_data(0)\n compare_value(self, output_value, model_value)\n\n def test_default_vector_pass_through(self):\n model_value = [1.0, 2, 3]\n system = PassThrough(value=model_value)\n context = system.CreateDefaultContext()\n np.testing.assert_array_equal(model_value, system.get_output_port()\n .Eval(context))\n\n def test_abstract_pass_through(self):\n model_value = Value('Hello world')\n system = PassThrough(abstract_model_value=model_value)\n context = system.CreateDefaultContext()\n system.get_input_port(0).FixValue(context, model_value)\n output = system.AllocateOutput()\n input_eval = system.EvalAbstractInput(context, 0)\n compare_value(self, input_eval, model_value)\n system.CalcOutput(context, output)\n output_value = output.get_data(0)\n compare_value(self, output_value, model_value)\n\n def test_port_switch(self):\n system = PortSwitch(vector_size=2)\n a = system.DeclareInputPort(name='a')\n system.DeclareInputPort(name='b')\n context = system.CreateDefaultContext()\n self.assertIsInstance(a, InputPort)\n system.get_port_selector_input_port().FixValue(context, a.get_index())\n\n def test_first_order_low_pass_filter(self):\n filter1 = FirstOrderLowPassFilter(time_constant=3.0, size=4)\n self.assertEqual(filter1.get_time_constant(), 3.0)\n alpha = np.array([1, 2, 3])\n filter2 = FirstOrderLowPassFilter(time_constants=alpha)\n np.testing.assert_array_equal(filter2.get_time_constants_vector(),\n alpha)\n context = filter2.CreateDefaultContext()\n 
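# The initial filter output is seeded per channel via the context.\n 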
filter2.set_initial_output_value(context, [0.0, -0.2, 0.4])\n <mask token>\n\n def test_saturation(self):\n system = Saturation((0.0, -1.0, 3.0), (1.0, 2.0, 4.0))\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n system.get_input_port(0).FixValue(context, input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest((-5.0, 5.0, 4.0), (0.0, 2.0, 4.0))\n mytest((0.4, 0.0, 3.5), (0.4, 0.0, 3.5))\n\n def test_trajectory_source(self):\n ppt = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[2.0, 3.0], [\n 2.0, 1.0]])\n system = TrajectorySource(trajectory=ppt, output_derivative_order=0,\n zero_derivatives_beyond_limits=True)\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n context.SetTime(input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest(0.0, (2.0, 2.0))\n mytest(0.5, (2.5, 1.5))\n mytest(1.0, (3.0, 1.0))\n ppt2 = PiecewisePolynomial.FirstOrderHold([0.0, 1.0], [[4.0, 6.0],\n [4.0, 2.0]])\n system.UpdateTrajectory(trajectory=ppt2)\n mytest(0.0, (4.0, 4.0))\n mytest(0.5, (5.0, 3.0))\n mytest(1.0, (6.0, 2.0))\n\n def test_symbolic_vector_system(self):\n t = Variable('t')\n x = [Variable('x0'), Variable('x1')]\n u = [Variable('u0'), Variable('u1')]\n system = SymbolicVectorSystem(time=t, state=x, input=u, dynamics=[x\n [0] + x[1], t], output=[u[1]], time_period=0.0)\n context = system.CreateDefaultContext()\n self.assertEqual(context.num_continuous_states(), 2)\n self.assertEqual(context.num_discrete_state_groups(), 0)\n self.assertEqual(system.get_input_port(0).size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertEqual(context.num_abstract_parameters(), 0)\n self.assertEqual(context.num_numeric_parameter_groups(), 0)\n self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(x[0] + x[1])\n )\n self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))\n\n def test_symbolic_vector_system_parameters(self):\n t = Variable('t')\n x = [Variable('x0'), Variable('x1')]\n u = [Variable('u0'), Variable('u1')]\n p = [Variable('p0'), Variable('p1')]\n system = SymbolicVectorSystem(time=t, state=x, input=u, parameter=p,\n dynamics=[p[0] * x[0] + x[1] + p[1], t], output=[u[1]],\n time_period=0.0)\n context = system.CreateDefaultContext()\n self.assertEqual(context.num_continuous_states(), 2)\n self.assertEqual(context.num_discrete_state_groups(), 0)\n self.assertEqual(system.get_input_port(0).size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertEqual(context.num_abstract_parameters(), 0)\n self.assertEqual(context.num_numeric_parameter_groups(), 1)\n self.assertEqual(context.get_numeric_parameter(0).size(), 2)\n self.assertTrue(system.dynamics_for_variable(x[0]).EqualTo(p[0] * x\n [0] + x[1] + p[1]))\n self.assertTrue(system.dynamics_for_variable(x[1]).EqualTo(t))\n\n def test_wrap_to_system(self):\n system = WrapToSystem(2)\n system.set_interval(1, 1.0, 2.0)\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n system.get_input_port(0).FixValue(context, input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).\n CopyToVector(), expected))\n mytest((-1.5, 0.5), (-1.5, 1.5))\n mytest((0.2, 0.3), (0.2, 1.3))\n\n def test_demultiplexer(self):\n demux = 
Demultiplexer(size=4)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), 4)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(), [1, 1, 1, 1]\n )\n input_vec = np.array([1.0, 2.0, 3.0, 4.0])\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n for i in range(4):\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[i]))\n demux = Demultiplexer(size=4, output_ports_size=2)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), 2)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(), [2, 2])\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n for i in range(2):\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[2 * i:2 * i + 2]))\n output_ports_sizes = np.array([1, 2, 1])\n num_output_ports = output_ports_sizes.size\n input_vec = np.array([1.0, 2.0, 3.0, 4.0])\n demux = Demultiplexer(output_ports_sizes=output_ports_sizes)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), num_output_ports)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(),\n output_ports_sizes)\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n output_port_start = 0\n for i in range(num_output_ports):\n output_port_size = output.get_vector_data(i).size()\n self.assertTrue(np.allclose(output.get_vector_data(i).get_value\n (), input_vec[output_port_start:output_port_start +\n output_port_size]))\n output_port_start += output_port_size\n\n def test_multiplexer(self):\n my_vector = MyVector2(data=[1.0, 2.0])\n test_cases = [dict(has_vector=False, mux=Multiplexer(\n num_scalar_inputs=4), data=[[5.0], [3.0], [4.0], [2.0]]), dict(\n has_vector=False, mux=Multiplexer(input_sizes=[2, 3]), data=[[\n 8.0, 4.0], [3.0, 6.0, 9.0]]), dict(has_vector=True, mux=\n Multiplexer(model_vector=my_vector), data=[[42.0], [3.0]])]\n for case in test_cases:\n mux = case['mux']\n port_size = sum([len(vec) for vec in case['data']])\n self.assertEqual(mux.get_output_port(0).size(), port_size)\n context = mux.CreateDefaultContext()\n output = mux.AllocateOutput()\n num_ports = len(case['data'])\n self.assertEqual(context.num_input_ports(), num_ports)\n for j, vec in enumerate(case['data']):\n mux.get_input_port(j).FixValue(context, vec)\n mux.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(0).get_value\n (), [elem for vec in case['data'] for elem in vec]))\n if case['has_vector']:\n value = output.get_vector_data(0)\n self.assertTrue(isinstance(value, MyVector2))\n\n def test_multilayer_perceptron(self):\n mlp = MultilayerPerceptron(layers=[1, 2, 3], activation_type=\n PerceptronActivationType.kReLU)\n self.assertEqual(mlp.get_input_port().size(), 1)\n self.assertEqual(mlp.get_output_port().size(), 3)\n context = mlp.CreateDefaultContext()\n params = np.zeros((mlp.num_parameters(), 1))\n self.assertEqual(mlp.num_parameters(), 13)\n self.assertEqual(mlp.layers(), [1, 2, 3])\n self.assertEqual(mlp.activation_type(layer=0),\n PerceptronActivationType.kReLU)\n self.assertEqual(len(mlp.GetParameters(context=context)), mlp.\n num_parameters())\n mlp.SetWeights(context=context, layer=0, 
W=np.array([[1], [2]]))\n mlp.SetBiases(context=context, layer=0, b=[3, 4])\n np.testing.assert_array_equal(mlp.GetWeights(context=context, layer\n =0), np.array([[1], [2]]))\n np.testing.assert_array_equal(mlp.GetBiases(context=context, layer=\n 0), np.array([3, 4]))\n params = np.zeros(mlp.num_parameters())\n mlp.SetWeights(params=params, layer=0, W=np.array([[1], [2]]))\n mlp.SetBiases(params=params, layer=0, b=[3, 4])\n np.testing.assert_array_equal(mlp.GetWeights(params=params, layer=0\n ), np.array([[1], [2]]))\n np.testing.assert_array_equal(mlp.GetBiases(params=params, layer=0),\n np.array([3, 4]))\n mutable_params = mlp.GetMutableParameters(context=context)\n mutable_params[:] = 3.0\n np.testing.assert_array_equal(mlp.GetParameters(context), np.full(\n mlp.num_parameters(), 3.0))\n global called_loss\n called_loss = False\n\n def silly_loss(Y, dloss_dY):\n global called_loss\n called_loss = True\n dloss_dY[:] = 1\n return Y.sum()\n dloss_dparams = np.zeros((13,))\n generator = RandomGenerator(23)\n mlp.SetRandomContext(context, generator)\n mlp.Backpropagation(context=context, X=np.array([1, 3, 4]).reshape(\n (1, 3)), loss=silly_loss, dloss_dparams=dloss_dparams)\n self.assertTrue(called_loss)\n self.assertTrue(dloss_dparams.any())\n dloss_dparams = np.zeros((13,))\n mlp.BackpropagationMeanSquaredError(context=context, X=np.array([1,\n 3, 4]).reshape((1, 3)), Y_desired=np.eye(3), dloss_dparams=\n dloss_dparams)\n self.assertTrue(dloss_dparams.any())\n Y = np.asfortranarray(np.eye(3))\n mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]), Y=Y)\n self.assertFalse(np.allclose(Y, np.eye(3)))\n Y2 = mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]))\n np.testing.assert_array_equal(Y, Y2)\n mlp2 = MultilayerPerceptron(layers=[3, 2, 1], activation_types=[\n PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])\n self.assertEqual(mlp2.activation_type(0), PerceptronActivationType.\n kReLU)\n self.assertEqual(mlp2.activation_type(1), PerceptronActivationType.\n kTanh)\n Y = np.asfortranarray(np.full((1, 3), 2.4))\n dYdX = np.asfortranarray(np.full((3, 3), 5.3))\n context2 = mlp2.CreateDefaultContext()\n mlp2.BatchOutput(context=context2, X=np.eye(3), Y=Y, dYdX=dYdX)\n np.testing.assert_array_almost_equal(Y, np.zeros((1, 3)))\n np.testing.assert_array_almost_equal(dYdX, np.zeros((3, 3)))\n mlp = MultilayerPerceptron(use_sin_cos_for_input=[True, False],\n remaining_layers=[3, 2], activation_types=[\n PerceptronActivationType.kReLU, PerceptronActivationType.kTanh])\n self.assertEqual(mlp.get_input_port().size(), 2)\n np.testing.assert_array_equal(mlp.layers(), [3, 3, 2])\n\n def test_random_source(self):\n source = RandomSource(distribution=RandomDistribution.kUniform,\n num_outputs=2, sampling_interval_sec=0.01)\n self.assertEqual(source.get_output_port(0).size(), 2)\n builder = DiagramBuilder()\n AddRandomInputs(sampling_interval_sec=0.01, builder=builder)\n builder_ad = DiagramBuilder_[AutoDiffXd]()\n AddRandomInputs(sampling_interval_sec=0.01, builder=builder_ad)\n\n def test_constant_vector_source(self):\n source = ConstantVectorSource(source_value=[1.0, 2.0])\n context = source.CreateDefaultContext()\n source.get_source_value(context)\n source.get_mutable_source_value(context)\n\n def test_ctor_api(self):\n \"\"\"Tests construction of systems for systems whose executions semantics\n are not tested above.\n \"\"\"\n ConstantValueSource(Value('Hello world'))\n DiscreteTimeDelay(update_sec=0.1, delay_time_steps=5, vector_size=2)\n 
DiscreteTimeDelay(update_sec=0.1, delay_time_steps=5,\n abstract_model_value=Value('Hello world'))\n with catch_drake_warnings(expected_count=2) as w:\n DiscreteTimeDelay(update_sec=0.1, delay_timesteps=5, vector_size=2)\n DiscreteTimeDelay(update_sec=0.1, delay_timesteps=5,\n abstract_model_value=Value('Hello world'))\n ZeroOrderHold(period_sec=0.1, offset_sec=0.0, vector_size=2)\n dut = ZeroOrderHold(period_sec=1.0, offset_sec=0.25,\n abstract_model_value=Value('Hello world'))\n self.assertEqual(dut.period(), 1.0)\n self.assertEqual(dut.offset(), 0.25)\n\n def test_shared_pointer_system_ctor(self):\n dut = SharedPointerSystem(value_to_hold=[1, 2, 3])\n readback = dut.get()\n self.assertListEqual(readback, [1, 2, 3])\n del dut\n self.assertListEqual(readback, [1, 2, 3])\n\n def test_shared_pointer_system_builder(self):\n builder = DiagramBuilder()\n self.assertListEqual(SharedPointerSystem.AddToBuilder(builder=\n builder, value_to_hold=[1, 2, 3]), [1, 2, 3])\n diagram = builder.Build()\n del builder\n readback = diagram.GetSystems()[0].get()\n self.assertListEqual(readback, [1, 2, 3])\n del diagram\n self.assertListEqual(readback, [1, 2, 3])\n\n def test_sine(self):\n sine_source = Sine(amplitude=1, frequency=2, phase=3, size=1,\n is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 1)\n self.assertEqual(sine_source.get_output_port(1).size(), 1)\n self.assertEqual(sine_source.get_output_port(2).size(), 1)\n sine_source = Sine(amplitude=1, frequency=2, phase=3, size=3,\n is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 3)\n self.assertEqual(sine_source.get_output_port(1).size(), 3)\n self.assertEqual(sine_source.get_output_port(2).size(), 3)\n sine_source = Sine(amplitudes=np.ones(2), frequencies=np.ones(2),\n phases=np.ones(2), is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 2)\n self.assertEqual(sine_source.get_output_port(1).size(), 2)\n self.assertEqual(sine_source.get_output_port(2).size(), 2)\n\n def test_discrete_derivative(self):\n discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=0.5)\n self.assertEqual(discrete_derivative.get_input_port(0).size(), 5)\n self.assertEqual(discrete_derivative.get_output_port(0).size(), 5)\n self.assertEqual(discrete_derivative.time_step(), 0.5)\n self.assertTrue(discrete_derivative.suppress_initial_transient())\n discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=\n 0.5, suppress_initial_transient=False)\n self.assertFalse(discrete_derivative.suppress_initial_transient())\n\n def test_state_interpolator_with_discrete_derivative(self):\n state_interpolator = StateInterpolatorWithDiscreteDerivative(\n num_positions=5, time_step=0.4)\n self.assertEqual(state_interpolator.get_input_port(0).size(), 5)\n self.assertEqual(state_interpolator.get_output_port(0).size(), 10)\n self.assertTrue(state_interpolator.suppress_initial_transient())\n context = state_interpolator.CreateDefaultContext()\n state_interpolator.set_initial_position(context=context, position=5 *\n [1.1])\n np.testing.assert_array_equal(context.get_discrete_state(0).\n CopyToVector(), np.array(5 * [1.1]))\n np.testing.assert_array_equal(context.get_discrete_state(1).\n CopyToVector(), np.array(5 * [1.1]))\n context = state_interpolator.CreateDefaultContext()\n state_interpolator.set_initial_position(state=context.get_state(),\n position=5 * [1.3])\n np.testing.assert_array_equal(context.get_discrete_state(0).\n CopyToVector(), np.array(5 * [1.3]))\n 
np.testing.assert_array_equal(context.get_discrete_state(1).\n CopyToVector(), np.array(5 * [1.3]))\n state_interpolator = StateInterpolatorWithDiscreteDerivative(\n num_positions=5, time_step=0.4, suppress_initial_transient=True)\n self.assertTrue(state_interpolator.suppress_initial_transient())\n\n @numpy_compare.check_nonsymbolic_types\n def test_log_vector_output(self, T):\n builder = DiagramBuilder_[T]()\n kSize = 1\n integrator = builder.AddSystem(Integrator_[T](kSize))\n port = integrator.get_output_port(0)\n loggers = []\n loggers.append(LogVectorOutput(port, builder))\n loggers.append(LogVectorOutput(src=port, builder=builder))\n loggers.append(LogVectorOutput(port, builder, 0.125))\n loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_period=0.125))\n loggers.append(LogVectorOutput(port, builder, {TriggerType.kForced}))\n loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_triggers={TriggerType.kForced}))\n loggers.append(LogVectorOutput(port, builder, {TriggerType.\n kPeriodic}, 0.125))\n loggers.append(LogVectorOutput(src=port, builder=builder,\n publish_triggers={TriggerType.kPeriodic}, publish_period=0.125))\n diagram = builder.Build()\n context = diagram.CreateDefaultContext()\n self.assertTrue(all(logger.FindLog(context).num_samples() == 0 for\n logger in loggers))\n <mask token>\n\n @numpy_compare.check_nonsymbolic_types\n def test_vector_log_sink(self, T):\n builder = DiagramBuilder_[T]()\n kSize = 1\n constructors = [VectorLogSink_[T]]\n loggers = []\n if T == float:\n constructors.append(VectorLogSink)\n for constructor in constructors:\n loggers.append(builder.AddSystem(constructor(kSize)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize)))\n loggers.append(builder.AddSystem(constructor(kSize, 0.125)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_period=0.125)))\n loggers.append(builder.AddSystem(constructor(kSize, {\n TriggerType.kForced})))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_triggers={TriggerType.kForced})))\n loggers.append(builder.AddSystem(constructor(kSize, {\n TriggerType.kPeriodic}, 0.125)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize,\n publish_triggers={TriggerType.kPeriodic}, publish_period=\n 0.125)))\n diagram = builder.Build()\n context = diagram.CreateDefaultContext()\n self.assertTrue(all(logger.FindLog(context) == logger.\n FindMutableLog(context) for logger in loggers))\n loggers_and_contexts = [(x, x.GetMyContextFromRoot(context)) for x in\n loggers]\n self.assertTrue(all(logger.GetLog(logger_context) == logger.\n GetMutableLog(logger_context) for logger, logger_context in\n loggers_and_contexts))\n self.assertTrue(all(logger.GetLog(logger_context) == logger.FindLog\n (context) for logger, logger_context in loggers_and_contexts))\n",
"step-5": "import gc\nimport unittest\nimport numpy as np\n\nfrom pydrake.autodiffutils import AutoDiffXd\nfrom pydrake.common import RandomDistribution, RandomGenerator\nfrom pydrake.common.test_utilities import numpy_compare\nfrom pydrake.common.test_utilities.deprecation import catch_drake_warnings\nfrom pydrake.common.value import Value\nfrom pydrake.symbolic import Expression, Variable\nfrom pydrake.systems.framework import (\n BasicVector,\n DiagramBuilder,\n DiagramBuilder_,\n InputPort,\n TriggerType,\n VectorBase,\n)\nfrom pydrake.systems.test.test_util import (\n MyVector2,\n)\nfrom pydrake.systems.primitives import (\n Adder, Adder_,\n AddRandomInputs,\n AffineSystem, AffineSystem_,\n ConstantValueSource, ConstantValueSource_,\n ConstantVectorSource, ConstantVectorSource_,\n ControllabilityMatrix,\n Demultiplexer, Demultiplexer_,\n DiscreteDerivative, DiscreteDerivative_,\n DiscreteTimeDelay, DiscreteTimeDelay_,\n FirstOrderLowPassFilter,\n FirstOrderTaylorApproximation,\n Gain, Gain_,\n Integrator, Integrator_,\n IsControllable,\n IsDetectable,\n IsObservable,\n IsStabilizable,\n Linearize,\n LinearSystem, LinearSystem_,\n LinearTransformDensity, LinearTransformDensity_,\n LogVectorOutput,\n MatrixGain,\n Multiplexer, Multiplexer_,\n MultilayerPerceptron, MultilayerPerceptron_,\n ObservabilityMatrix,\n PassThrough, PassThrough_,\n PerceptronActivationType,\n PortSwitch, PortSwitch_,\n RandomSource,\n Saturation, Saturation_,\n SharedPointerSystem, SharedPointerSystem_,\n Sine, Sine_,\n StateInterpolatorWithDiscreteDerivative,\n StateInterpolatorWithDiscreteDerivative_,\n SymbolicVectorSystem, SymbolicVectorSystem_,\n TrajectoryAffineSystem, TrajectoryAffineSystem_,\n TrajectoryLinearSystem, TrajectoryLinearSystem_,\n TrajectorySource, TrajectorySource_,\n VectorLog, VectorLogSink, VectorLogSink_,\n WrapToSystem, WrapToSystem_,\n ZeroOrderHold, ZeroOrderHold_,\n)\nfrom pydrake.trajectories import PiecewisePolynomial\n\n\ndef compare_value(test, a, b):\n # Compares a vector or abstract value.\n if isinstance(a, VectorBase):\n test.assertTrue(np.allclose(a.get_value(), b.get_value()))\n else:\n test.assertEqual(type(a.get_value()), type(b.get_value()))\n test.assertEqual(a.get_value(), b.get_value())\n\n\nclass TestGeneral(unittest.TestCase):\n def _check_instantiations(self, template, supports_symbolic=True):\n default_cls = template[None]\n self.assertTrue(template[float] is default_cls)\n self.assertTrue(template[AutoDiffXd] is not default_cls)\n if supports_symbolic:\n self.assertTrue(template[Expression] is not default_cls)\n\n def test_instantiations(self):\n # TODO(eric.cousineau): Refine tests once NumPy functionality is\n # resolved for dtype=object, or dtype=custom is used.\n self._check_instantiations(Adder_)\n self._check_instantiations(AffineSystem_)\n self._check_instantiations(ConstantValueSource_)\n self._check_instantiations(ConstantVectorSource_)\n self._check_instantiations(Demultiplexer_)\n self._check_instantiations(DiscreteDerivative_)\n self._check_instantiations(DiscreteTimeDelay_)\n self._check_instantiations(Gain_)\n self._check_instantiations(Integrator_)\n self._check_instantiations(LinearSystem_)\n self._check_instantiations(LinearTransformDensity_,\n supports_symbolic=False)\n self._check_instantiations(Multiplexer_)\n self._check_instantiations(MultilayerPerceptron_)\n self._check_instantiations(PassThrough_)\n self._check_instantiations(PortSwitch_)\n self._check_instantiations(Saturation_)\n self._check_instantiations(SharedPointerSystem_)\n 
self._check_instantiations(Sine_)\n self._check_instantiations(StateInterpolatorWithDiscreteDerivative_)\n self._check_instantiations(SymbolicVectorSystem_)\n self._check_instantiations(TrajectoryAffineSystem_,\n supports_symbolic=False)\n self._check_instantiations(TrajectoryLinearSystem_,\n supports_symbolic=False)\n self._check_instantiations(TrajectorySource_)\n self._check_instantiations(VectorLogSink_)\n self._check_instantiations(WrapToSystem_)\n self._check_instantiations(ZeroOrderHold_)\n\n def test_linear_affine_system(self):\n # Just make sure linear system is spelled correctly.\n A = np.identity(2)\n B = np.array([[0], [1]])\n f0 = np.array([[0], [0]])\n C = np.array([[0, 1]])\n D = [1]\n y0 = [0]\n system = LinearSystem(A, B, C, D)\n context = system.CreateDefaultContext()\n self.assertEqual(system.get_input_port(0).size(), 1)\n self.assertEqual(context\n .get_mutable_continuous_state_vector().size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertTrue((system.A() == A).all())\n self.assertTrue((system.B() == B).all())\n self.assertTrue((system.f0() == f0).all())\n self.assertTrue((system.C() == C).all())\n self.assertEqual(system.D(), D)\n self.assertEqual(system.y0(), y0)\n self.assertEqual(system.time_period(), 0.)\n\n x0 = np.array([1, 2])\n system.configure_default_state(x0=x0)\n system.SetDefaultContext(context)\n np.testing.assert_equal(\n context.get_continuous_state_vector().CopyToVector(), x0)\n generator = RandomGenerator()\n system.SetRandomContext(context, generator)\n np.testing.assert_equal(\n context.get_continuous_state_vector().CopyToVector(), x0)\n system.configure_random_state(covariance=np.eye(2))\n system.SetRandomContext(context, generator)\n self.assertNotEqual(\n context.get_continuous_state_vector().CopyToVector()[1], x0[1])\n\n Co = ControllabilityMatrix(system)\n self.assertEqual(Co.shape, (2, 2))\n self.assertFalse(IsControllable(system))\n self.assertFalse(IsControllable(system, 1e-6))\n self.assertFalse(IsStabilizable(sys=system))\n self.assertFalse(IsStabilizable(sys=system, threshold=1e-6))\n Ob = ObservabilityMatrix(system)\n self.assertEqual(Ob.shape, (2, 2))\n self.assertFalse(IsObservable(system))\n self.assertFalse(IsDetectable(sys=system))\n self.assertFalse(IsDetectable(sys=system, threshold=1e-6))\n\n system = AffineSystem(A, B, f0, C, D, y0, .1)\n self.assertEqual(system.get_input_port(0), system.get_input_port())\n self.assertEqual(system.get_output_port(0), system.get_output_port())\n context = system.CreateDefaultContext()\n self.assertEqual(system.get_input_port(0).size(), 1)\n self.assertEqual(context.get_discrete_state_vector().size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertTrue((system.A() == A).all())\n self.assertTrue((system.B() == B).all())\n self.assertTrue((system.f0() == f0).all())\n self.assertTrue((system.C() == C).all())\n self.assertEqual(system.D(), D)\n self.assertEqual(system.y0(), y0)\n self.assertEqual(system.time_period(), .1)\n\n system.get_input_port(0).FixValue(context, 0)\n linearized = Linearize(system, context)\n self.assertTrue((linearized.A() == A).all())\n taylor = FirstOrderTaylorApproximation(system, context)\n self.assertTrue((taylor.y0() == y0).all())\n\n new_A = np.array([[1, 2], [3, 4]])\n new_B = np.array([[5], [6]])\n new_f0 = np.array([[7], [8]])\n new_C = np.array([[9, 10]])\n new_D = np.array([[11]])\n new_y0 = np.array([12])\n system.UpdateCoefficients(\n A=new_A, B=new_B, f0=new_f0, C=new_C, D=new_D, y0=new_y0\n )\n 
np.testing.assert_equal(new_A, system.A())\n np.testing.assert_equal(new_B, system.B())\n np.testing.assert_equal(new_f0.flatten(), system.f0())\n np.testing.assert_equal(new_C, system.C())\n np.testing.assert_equal(new_D, system.D())\n np.testing.assert_equal(new_y0, system.y0())\n\n system = MatrixGain(D=A)\n self.assertTrue((system.D() == A).all())\n\n system = TrajectoryAffineSystem(\n PiecewisePolynomial(A),\n PiecewisePolynomial(B),\n PiecewisePolynomial(f0),\n PiecewisePolynomial(C),\n PiecewisePolynomial(D),\n PiecewisePolynomial(y0),\n .1)\n self.assertEqual(system.get_input_port(0), system.get_input_port())\n self.assertEqual(system.get_output_port(0), system.get_output_port())\n context = system.CreateDefaultContext()\n self.assertEqual(system.get_input_port(0).size(), 1)\n self.assertEqual(context.get_discrete_state_vector().size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n for t in np.linspace(0., 1., 5):\n self.assertTrue((system.A(t) == A).all())\n self.assertTrue((system.B(t) == B).all())\n self.assertTrue((system.f0(t) == f0).all())\n self.assertTrue((system.C(t) == C).all())\n self.assertEqual(system.D(t), D)\n self.assertEqual(system.y0(t), y0)\n self.assertEqual(system.time_period(), .1)\n x0 = np.array([1, 2])\n system.configure_default_state(x0=x0)\n system.SetDefaultContext(context)\n np.testing.assert_equal(\n context.get_discrete_state_vector().CopyToVector(), x0)\n generator = RandomGenerator()\n system.SetRandomContext(context, generator)\n np.testing.assert_equal(\n context.get_discrete_state_vector().CopyToVector(), x0)\n system.configure_random_state(covariance=np.eye(2))\n system.SetRandomContext(context, generator)\n self.assertNotEqual(\n context.get_discrete_state_vector().CopyToVector()[1], x0[1])\n\n system = TrajectoryLinearSystem(\n A=PiecewisePolynomial(A),\n B=PiecewisePolynomial(B),\n C=PiecewisePolynomial(C),\n D=PiecewisePolynomial(D),\n time_period=0.1)\n self.assertEqual(system.time_period(), .1)\n system.configure_default_state(x0=np.array([1, 2]))\n system.configure_random_state(covariance=np.eye(2))\n\n def test_linear_affine_system_empty_matrices(self):\n # Confirm the default values for the system matrices in the\n # constructor.\n def CheckSizes(system, num_states, num_inputs, num_outputs):\n self.assertEqual(system.num_continuous_states(), num_states)\n self.assertEqual(system.num_inputs(), num_inputs)\n self.assertEqual(system.num_outputs(), num_outputs)\n\n # A constant vector system.\n system = AffineSystem(y0=[2, 1])\n CheckSizes(system, num_states=0, num_inputs=0, num_outputs=2)\n\n # A matrix gain.\n system = AffineSystem(D=np.eye(2))\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n system = LinearSystem(D=np.eye(2))\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n\n # Add an offset.\n system = AffineSystem(D=np.eye(2), y0=[1, 2])\n CheckSizes(system, num_states=0, num_inputs=2, num_outputs=2)\n\n # An integrator.\n system = LinearSystem(B=np.eye(2))\n CheckSizes(system, num_states=2, num_inputs=2, num_outputs=0)\n\n def test_linear_system_zero_size(self):\n # Explicitly test #12633.\n num_x = 0\n num_y = 2\n num_u = 2\n A = np.zeros((num_x, num_x))\n B = np.zeros((num_x, num_u))\n C = np.zeros((num_y, num_x))\n D = np.zeros((num_y, num_u))\n self.assertIsNotNone(LinearSystem(A, B, C, D))\n\n @numpy_compare.check_nonsymbolic_types\n def test_linear_transform_density(self, T):\n dut = LinearTransformDensity_[T](\n distribution=RandomDistribution.kGaussian,\n input_size=3,\n 
output_size=3)\n w_in = np.array([T(0.5), T(0.1), T(1.5)])\n context = dut.CreateDefaultContext()\n dut.get_input_port_w_in().FixValue(context, w_in)\n self.assertEqual(dut.get_input_port_A().size(), 9)\n self.assertEqual(dut.get_input_port_b().size(), 3)\n self.assertEqual(dut.get_distribution(), RandomDistribution.kGaussian)\n A = np.array([\n [T(0.5), T(1), T(2)], [T(1), T(2), T(3)], [T(3), T(4), T(5)]])\n dut.FixConstantA(context=context, A=A)\n b = np.array([T(1), T(2), T(3)])\n dut.FixConstantB(context=context, b=b)\n\n dut.CalcDensity(context=context)\n\n self.assertEqual(dut.get_output_port_w_out().size(), 3)\n self.assertEqual(dut.get_output_port_w_out_density().size(), 1)\n\n def test_vector_pass_through(self):\n model_value = BasicVector([1., 2, 3])\n system = PassThrough(vector_size=model_value.size())\n context = system.CreateDefaultContext()\n system.get_input_port(0).FixValue(context, model_value)\n output = system.AllocateOutput()\n input_eval = system.EvalVectorInput(context, 0)\n compare_value(self, input_eval, model_value)\n system.CalcOutput(context, output)\n output_value = output.get_vector_data(0)\n compare_value(self, output_value, model_value)\n\n def test_default_vector_pass_through(self):\n model_value = [1., 2, 3]\n system = PassThrough(value=model_value)\n context = system.CreateDefaultContext()\n np.testing.assert_array_equal(\n model_value, system.get_output_port().Eval(context))\n\n def test_abstract_pass_through(self):\n model_value = Value(\"Hello world\")\n system = PassThrough(abstract_model_value=model_value)\n context = system.CreateDefaultContext()\n system.get_input_port(0).FixValue(context, model_value)\n output = system.AllocateOutput()\n input_eval = system.EvalAbstractInput(context, 0)\n compare_value(self, input_eval, model_value)\n system.CalcOutput(context, output)\n output_value = output.get_data(0)\n compare_value(self, output_value, model_value)\n\n def test_port_switch(self):\n system = PortSwitch(vector_size=2)\n a = system.DeclareInputPort(name=\"a\")\n system.DeclareInputPort(name=\"b\")\n context = system.CreateDefaultContext()\n self.assertIsInstance(a, InputPort)\n system.get_port_selector_input_port().FixValue(context, a.get_index())\n\n def test_first_order_low_pass_filter(self):\n filter1 = FirstOrderLowPassFilter(time_constant=3.0, size=4)\n self.assertEqual(filter1.get_time_constant(), 3.0)\n\n alpha = np.array([1, 2, 3])\n filter2 = FirstOrderLowPassFilter(time_constants=alpha)\n np.testing.assert_array_equal(filter2.get_time_constants_vector(),\n alpha)\n\n context = filter2.CreateDefaultContext()\n filter2.set_initial_output_value(context, [0., -0.2, 0.4])\n\n def test_gain(self):\n k = 42.\n input_size = 10\n systems = [Gain(k=k, size=input_size),\n Gain(k=k*np.ones(input_size))]\n\n for system in systems:\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n system.get_input_port(0).FixValue(context, input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(\n 0).CopyToVector(), expected))\n\n test_input = np.arange(input_size)\n mytest(np.arange(input_size), k*np.arange(input_size))\n\n def test_saturation(self):\n system = Saturation((0., -1., 3.), (1., 2., 4.))\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n system.get_input_port(0).FixValue(context, input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(\n 
0).CopyToVector(), expected))\n\n mytest((-5., 5., 4.), (0., 2., 4.))\n mytest((.4, 0., 3.5), (.4, 0., 3.5))\n\n def test_trajectory_source(self):\n ppt = PiecewisePolynomial.FirstOrderHold(\n [0., 1.], [[2., 3.], [2., 1.]])\n system = TrajectorySource(trajectory=ppt,\n output_derivative_order=0,\n zero_derivatives_beyond_limits=True)\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n context.SetTime(input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(\n 0).CopyToVector(), expected))\n\n mytest(0.0, (2.0, 2.0))\n mytest(0.5, (2.5, 1.5))\n mytest(1.0, (3.0, 1.0))\n\n ppt2 = PiecewisePolynomial.FirstOrderHold(\n [0., 1.], [[4., 6.], [4., 2.]])\n system.UpdateTrajectory(trajectory=ppt2)\n mytest(0.0, (4.0, 4.0))\n mytest(0.5, (5.0, 3.0))\n mytest(1.0, (6.0, 2.0))\n\n def test_symbolic_vector_system(self):\n t = Variable(\"t\")\n x = [Variable(\"x0\"), Variable(\"x1\")]\n u = [Variable(\"u0\"), Variable(\"u1\")]\n system = SymbolicVectorSystem(time=t, state=x, input=u,\n dynamics=[x[0] + x[1], t],\n output=[u[1]],\n time_period=0.0)\n context = system.CreateDefaultContext()\n\n self.assertEqual(context.num_continuous_states(), 2)\n self.assertEqual(context.num_discrete_state_groups(), 0)\n self.assertEqual(system.get_input_port(0).size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertEqual(context.num_abstract_parameters(), 0)\n self.assertEqual(context.num_numeric_parameter_groups(), 0)\n self.assertTrue(system.dynamics_for_variable(x[0])\n .EqualTo(x[0] + x[1]))\n self.assertTrue(system.dynamics_for_variable(x[1])\n .EqualTo(t))\n\n def test_symbolic_vector_system_parameters(self):\n t = Variable(\"t\")\n x = [Variable(\"x0\"), Variable(\"x1\")]\n u = [Variable(\"u0\"), Variable(\"u1\")]\n p = [Variable(\"p0\"), Variable(\"p1\")]\n system = SymbolicVectorSystem(time=t, state=x, input=u,\n parameter=p,\n dynamics=[p[0] * x[0] + x[1] + p[1], t],\n output=[u[1]],\n time_period=0.0)\n context = system.CreateDefaultContext()\n\n self.assertEqual(context.num_continuous_states(), 2)\n self.assertEqual(context.num_discrete_state_groups(), 0)\n self.assertEqual(system.get_input_port(0).size(), 2)\n self.assertEqual(system.get_output_port(0).size(), 1)\n self.assertEqual(context.num_abstract_parameters(), 0)\n self.assertEqual(context.num_numeric_parameter_groups(), 1)\n self.assertEqual(context.get_numeric_parameter(0).size(), 2)\n self.assertTrue(system.dynamics_for_variable(x[0])\n .EqualTo(p[0] * x[0] + x[1] + p[1]))\n self.assertTrue(system.dynamics_for_variable(x[1])\n .EqualTo(t))\n\n def test_wrap_to_system(self):\n system = WrapToSystem(2)\n system.set_interval(1, 1., 2.)\n context = system.CreateDefaultContext()\n output = system.AllocateOutput()\n\n def mytest(input, expected):\n system.get_input_port(0).FixValue(context, input)\n system.CalcOutput(context, output)\n self.assertTrue(np.allclose(output.get_vector_data(\n 0).CopyToVector(), expected))\n\n mytest((-1.5, 0.5), (-1.5, 1.5))\n mytest((.2, .3), (.2, 1.3))\n\n def test_demultiplexer(self):\n # Test demultiplexer with scalar outputs.\n demux = Demultiplexer(size=4)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), 4)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(),\n [1, 1, 1, 1])\n\n input_vec = np.array([1., 2., 3., 4.])\n demux.get_input_port(0).FixValue(context, input_vec)\n output = 
demux.AllocateOutput()\n demux.CalcOutput(context, output)\n\n for i in range(4):\n self.assertTrue(\n np.allclose(output.get_vector_data(i).get_value(),\n input_vec[i]))\n\n # Test demultiplexer with vector outputs.\n demux = Demultiplexer(size=4, output_ports_size=2)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), 2)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(), [2, 2])\n\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n\n for i in range(2):\n self.assertTrue(\n np.allclose(output.get_vector_data(i).get_value(),\n input_vec[2*i:2*i+2]))\n\n # Test demultiplexer with different output port sizes.\n output_ports_sizes = np.array([1, 2, 1])\n num_output_ports = output_ports_sizes.size\n input_vec = np.array([1., 2., 3., 4.])\n demux = Demultiplexer(output_ports_sizes=output_ports_sizes)\n context = demux.CreateDefaultContext()\n self.assertEqual(demux.num_input_ports(), 1)\n self.assertEqual(demux.num_output_ports(), num_output_ports)\n numpy_compare.assert_equal(demux.get_output_ports_sizes(),\n output_ports_sizes)\n\n demux.get_input_port(0).FixValue(context, input_vec)\n output = demux.AllocateOutput()\n demux.CalcOutput(context, output)\n\n output_port_start = 0\n for i in range(num_output_ports):\n output_port_size = output.get_vector_data(i).size()\n self.assertTrue(\n np.allclose(output.get_vector_data(i).get_value(),\n input_vec[output_port_start:\n output_port_start+output_port_size]))\n output_port_start += output_port_size\n\n def test_multiplexer(self):\n my_vector = MyVector2(data=[1., 2.])\n test_cases = [\n dict(has_vector=False, mux=Multiplexer(num_scalar_inputs=4),\n data=[[5.], [3.], [4.], [2.]]),\n dict(has_vector=False, mux=Multiplexer(input_sizes=[2, 3]),\n data=[[8., 4.], [3., 6., 9.]]),\n dict(has_vector=True, mux=Multiplexer(model_vector=my_vector),\n data=[[42.], [3.]]),\n ]\n for case in test_cases:\n mux = case['mux']\n port_size = sum([len(vec) for vec in case['data']])\n self.assertEqual(mux.get_output_port(0).size(), port_size)\n context = mux.CreateDefaultContext()\n output = mux.AllocateOutput()\n num_ports = len(case['data'])\n self.assertEqual(context.num_input_ports(), num_ports)\n for j, vec in enumerate(case['data']):\n mux.get_input_port(j).FixValue(context, vec)\n mux.CalcOutput(context, output)\n self.assertTrue(\n np.allclose(output.get_vector_data(0).get_value(),\n [elem for vec in case['data'] for elem in vec]))\n if case['has_vector']:\n # Check the type matches MyVector2.\n value = output.get_vector_data(0)\n self.assertTrue(isinstance(value, MyVector2))\n\n def test_multilayer_perceptron(self):\n mlp = MultilayerPerceptron(\n layers=[1, 2, 3], activation_type=PerceptronActivationType.kReLU)\n self.assertEqual(mlp.get_input_port().size(), 1)\n self.assertEqual(mlp.get_output_port().size(), 3)\n context = mlp.CreateDefaultContext()\n params = np.zeros((mlp.num_parameters(), 1))\n self.assertEqual(mlp.num_parameters(), 13)\n self.assertEqual(mlp.layers(), [1, 2, 3])\n self.assertEqual(mlp.activation_type(layer=0),\n PerceptronActivationType.kReLU)\n self.assertEqual(len(mlp.GetParameters(context=context)),\n mlp.num_parameters())\n mlp.SetWeights(context=context, layer=0, W=np.array([[1], [2]]))\n mlp.SetBiases(context=context, layer=0, b=[3, 4])\n np.testing.assert_array_equal(\n mlp.GetWeights(context=context, layer=0), np.array([[1], [2]]))\n 
np.testing.assert_array_equal(\n mlp.GetBiases(context=context, layer=0), np.array([3, 4]))\n params = np.zeros(mlp.num_parameters())\n mlp.SetWeights(params=params, layer=0, W=np.array([[1], [2]]))\n mlp.SetBiases(params=params, layer=0, b=[3, 4])\n np.testing.assert_array_equal(\n mlp.GetWeights(params=params, layer=0), np.array([[1], [2]]))\n np.testing.assert_array_equal(\n mlp.GetBiases(params=params, layer=0), np.array([3, 4]))\n mutable_params = mlp.GetMutableParameters(context=context)\n mutable_params[:] = 3.0\n np.testing.assert_array_equal(mlp.GetParameters(context),\n np.full(mlp.num_parameters(), 3.0))\n\n global called_loss\n called_loss = False\n\n def silly_loss(Y, dloss_dY):\n global called_loss\n called_loss = True\n # We must be careful to update the dloss in place, rather than bind\n # a new matrix to the same variable name.\n dloss_dY[:] = 1\n # dloss_dY = np.array(...etc...) # <== wrong\n return Y.sum()\n\n dloss_dparams = np.zeros((13,))\n generator = RandomGenerator(23)\n mlp.SetRandomContext(context, generator)\n mlp.Backpropagation(context=context,\n X=np.array([1, 3, 4]).reshape((1, 3)),\n loss=silly_loss,\n dloss_dparams=dloss_dparams)\n self.assertTrue(called_loss)\n self.assertTrue(dloss_dparams.any()) # No longer all zero.\n\n dloss_dparams = np.zeros((13,))\n mlp.BackpropagationMeanSquaredError(context=context,\n X=np.array([1, 3, 4]).reshape(\n (1, 3)),\n Y_desired=np.eye(3),\n dloss_dparams=dloss_dparams)\n self.assertTrue(dloss_dparams.any()) # No longer all zero.\n\n Y = np.asfortranarray(np.eye(3))\n mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]), Y=Y)\n self.assertFalse(np.allclose(Y, np.eye(3)))\n Y2 = mlp.BatchOutput(context=context, X=np.array([[0.1, 0.3, 0.4]]))\n np.testing.assert_array_equal(Y, Y2)\n\n mlp2 = MultilayerPerceptron(layers=[3, 2, 1],\n activation_types=[\n PerceptronActivationType.kReLU,\n PerceptronActivationType.kTanh\n ])\n self.assertEqual(mlp2.activation_type(0),\n PerceptronActivationType.kReLU)\n self.assertEqual(mlp2.activation_type(1),\n PerceptronActivationType.kTanh)\n Y = np.asfortranarray(np.full((1, 3), 2.4))\n dYdX = np.asfortranarray(np.full((3, 3), 5.3))\n context2 = mlp2.CreateDefaultContext()\n mlp2.BatchOutput(context=context2, X=np.eye(3), Y=Y, dYdX=dYdX)\n # The default context sets the weights and biases to zero, so the\n # output (and gradients) should be zero.\n np.testing.assert_array_almost_equal(Y, np.zeros((1, 3)))\n np.testing.assert_array_almost_equal(dYdX, np.zeros((3, 3)))\n\n mlp = MultilayerPerceptron(use_sin_cos_for_input=[True, False],\n remaining_layers=[3, 2],\n activation_types=[\n PerceptronActivationType.kReLU,\n PerceptronActivationType.kTanh\n ])\n self.assertEqual(mlp.get_input_port().size(), 2)\n np.testing.assert_array_equal(mlp.layers(), [3, 3, 2])\n\n def test_random_source(self):\n source = RandomSource(distribution=RandomDistribution.kUniform,\n num_outputs=2, sampling_interval_sec=0.01)\n self.assertEqual(source.get_output_port(0).size(), 2)\n\n builder = DiagramBuilder()\n # Note: There are no random inputs to add to the empty diagram, but it\n # confirms the API works.\n AddRandomInputs(sampling_interval_sec=0.01, builder=builder)\n\n builder_ad = DiagramBuilder_[AutoDiffXd]()\n AddRandomInputs(sampling_interval_sec=0.01, builder=builder_ad)\n\n def test_constant_vector_source(self):\n source = ConstantVectorSource(source_value=[1., 2.])\n context = source.CreateDefaultContext()\n source.get_source_value(context)\n source.get_mutable_source_value(context)\n\n def 
test_ctor_api(self):\n \"\"\"Tests construction of systems for systems whose executions semantics\n are not tested above.\n \"\"\"\n ConstantValueSource(Value(\"Hello world\"))\n DiscreteTimeDelay(update_sec=0.1, delay_time_steps=5, vector_size=2)\n DiscreteTimeDelay(\n update_sec=0.1, delay_time_steps=5,\n abstract_model_value=Value(\"Hello world\"))\n with catch_drake_warnings(expected_count=2) as w:\n DiscreteTimeDelay(update_sec=0.1, delay_timesteps=5, vector_size=2)\n DiscreteTimeDelay(\n update_sec=0.1, delay_timesteps=5,\n abstract_model_value=Value(\"Hello world\"))\n\n ZeroOrderHold(period_sec=0.1, offset_sec=0.0, vector_size=2)\n dut = ZeroOrderHold(period_sec=1.0, offset_sec=0.25,\n abstract_model_value=Value(\"Hello world\"))\n self.assertEqual(dut.period(), 1.0)\n self.assertEqual(dut.offset(), 0.25)\n\n def test_shared_pointer_system_ctor(self):\n dut = SharedPointerSystem(value_to_hold=[1, 2, 3])\n readback = dut.get()\n self.assertListEqual(readback, [1, 2, 3])\n del dut\n self.assertListEqual(readback, [1, 2, 3])\n\n def test_shared_pointer_system_builder(self):\n builder = DiagramBuilder()\n self.assertListEqual(\n SharedPointerSystem.AddToBuilder(\n builder=builder, value_to_hold=[1, 2, 3]),\n [1, 2, 3])\n diagram = builder.Build()\n del builder\n readback = diagram.GetSystems()[0].get()\n self.assertListEqual(readback, [1, 2, 3])\n del diagram\n self.assertListEqual(readback, [1, 2, 3])\n\n def test_sine(self):\n # Test scalar output.\n sine_source = Sine(amplitude=1, frequency=2, phase=3,\n size=1, is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 1)\n self.assertEqual(sine_source.get_output_port(1).size(), 1)\n self.assertEqual(sine_source.get_output_port(2).size(), 1)\n\n # Test vector output.\n sine_source = Sine(amplitude=1, frequency=2, phase=3,\n size=3, is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 3)\n self.assertEqual(sine_source.get_output_port(1).size(), 3)\n self.assertEqual(sine_source.get_output_port(2).size(), 3)\n\n sine_source = Sine(amplitudes=np.ones(2), frequencies=np.ones(2),\n phases=np.ones(2), is_time_based=True)\n self.assertEqual(sine_source.get_output_port(0).size(), 2)\n self.assertEqual(sine_source.get_output_port(1).size(), 2)\n self.assertEqual(sine_source.get_output_port(2).size(), 2)\n\n def test_discrete_derivative(self):\n discrete_derivative = DiscreteDerivative(num_inputs=5, time_step=0.5)\n self.assertEqual(discrete_derivative.get_input_port(0).size(), 5)\n self.assertEqual(discrete_derivative.get_output_port(0).size(), 5)\n self.assertEqual(discrete_derivative.time_step(), 0.5)\n self.assertTrue(discrete_derivative.suppress_initial_transient())\n\n discrete_derivative = DiscreteDerivative(\n num_inputs=5, time_step=0.5, suppress_initial_transient=False)\n self.assertFalse(discrete_derivative.suppress_initial_transient())\n\n def test_state_interpolator_with_discrete_derivative(self):\n state_interpolator = StateInterpolatorWithDiscreteDerivative(\n num_positions=5, time_step=0.4)\n self.assertEqual(state_interpolator.get_input_port(0).size(), 5)\n self.assertEqual(state_interpolator.get_output_port(0).size(), 10)\n self.assertTrue(state_interpolator.suppress_initial_transient())\n\n # test set_initial_position using context\n context = state_interpolator.CreateDefaultContext()\n state_interpolator.set_initial_position(\n context=context, position=5*[1.1])\n np.testing.assert_array_equal(\n context.get_discrete_state(0).CopyToVector(),\n np.array(5*[1.1]))\n 
np.testing.assert_array_equal(\n context.get_discrete_state(1).CopyToVector(),\n np.array(5*[1.1]))\n\n # test set_initial_position using state\n context = state_interpolator.CreateDefaultContext()\n state_interpolator.set_initial_position(\n state=context.get_state(), position=5*[1.3])\n np.testing.assert_array_equal(\n context.get_discrete_state(0).CopyToVector(),\n np.array(5*[1.3]))\n np.testing.assert_array_equal(\n context.get_discrete_state(1).CopyToVector(),\n np.array(5*[1.3]))\n\n state_interpolator = StateInterpolatorWithDiscreteDerivative(\n num_positions=5, time_step=0.4, suppress_initial_transient=True)\n self.assertTrue(state_interpolator.suppress_initial_transient())\n\n @numpy_compare.check_nonsymbolic_types\n def test_log_vector_output(self, T):\n # Add various redundant loggers to a system, to exercise the\n # LogVectorOutput bindings.\n builder = DiagramBuilder_[T]()\n kSize = 1\n integrator = builder.AddSystem(Integrator_[T](kSize))\n port = integrator.get_output_port(0)\n loggers = []\n loggers.append(LogVectorOutput(port, builder))\n loggers.append(LogVectorOutput(src=port, builder=builder))\n loggers.append(LogVectorOutput(port, builder, 0.125))\n loggers.append(LogVectorOutput(\n src=port, builder=builder, publish_period=0.125))\n\n loggers.append(LogVectorOutput(port, builder, {TriggerType.kForced}))\n loggers.append(LogVectorOutput(\n src=port, builder=builder, publish_triggers={TriggerType.kForced}))\n loggers.append(LogVectorOutput(\n port, builder, {TriggerType.kPeriodic}, 0.125))\n loggers.append(LogVectorOutput(\n src=port, builder=builder,\n publish_triggers={TriggerType.kPeriodic}, publish_period=0.125))\n\n # Check the returned loggers by calling some trivial methods.\n diagram = builder.Build()\n context = diagram.CreateDefaultContext()\n self.assertTrue(all(logger.FindLog(context).num_samples() == 0\n for logger in loggers))\n\n @numpy_compare.check_nonsymbolic_types\n def test_vector_log(self, T):\n kSize = 1\n dut = VectorLog(kSize)\n self.assertEqual(dut.get_input_size(), kSize)\n dut.AddData(0.1, [22.22])\n self.assertEqual(dut.num_samples(), 1)\n self.assertEqual(dut.sample_times(), [0.1])\n self.assertEqual(dut.data(), [22.22])\n dut.Clear()\n self.assertEqual(dut.num_samples(), 0)\n # There is no good way from python to test the semantics of Reserve(),\n # but test the binding anyway.\n dut.Reserve(VectorLog.kDefaultCapacity * 3)\n\n @numpy_compare.check_nonsymbolic_types\n def test_vector_log_sink(self, T):\n # Add various redundant loggers to a system, to exercise the\n # VectorLog constructor bindings.\n builder = DiagramBuilder_[T]()\n kSize = 1\n constructors = [VectorLogSink_[T]]\n loggers = []\n if T == float:\n constructors.append(VectorLogSink)\n for constructor in constructors:\n loggers.append(builder.AddSystem(constructor(kSize)))\n loggers.append(builder.AddSystem(constructor(input_size=kSize)))\n loggers.append(builder.AddSystem(constructor(kSize, 0.125)))\n loggers.append(builder.AddSystem(\n constructor(input_size=kSize, publish_period=0.125)))\n loggers.append(builder.AddSystem(\n constructor(kSize, {TriggerType.kForced})))\n loggers.append(builder.AddSystem(\n constructor(input_size=kSize,\n publish_triggers={TriggerType.kForced})))\n loggers.append(builder.AddSystem(\n constructor(kSize, {TriggerType.kPeriodic}, 0.125)))\n loggers.append(builder.AddSystem(\n constructor(input_size=kSize,\n publish_triggers={TriggerType.kPeriodic},\n publish_period=0.125)))\n\n # Exercise all of the log access methods.\n diagram = 
builder.Build()\n context = diagram.CreateDefaultContext()\n # FindLog and FindMutableLog find the same object.\n self.assertTrue(\n all(logger.FindLog(context) == logger.FindMutableLog(context)\n for logger in loggers))\n # Build a list of pairs of loggers and their local contexts.\n loggers_and_contexts = [(x, x.GetMyContextFromRoot(context))\n for x in loggers]\n # GetLog and GetMutableLog find the same object.\n self.assertTrue(\n all(logger.GetLog(logger_context)\n == logger.GetMutableLog(logger_context)\n for logger, logger_context in loggers_and_contexts))\n # GetLog and FindLog find the same object, given the proper contexts.\n self.assertTrue(\n all(logger.GetLog(logger_context) == logger.FindLog(context)\n for logger, logger_context in loggers_and_contexts))\n",
"step-ids": [
25,
26,
28,
30,
35
]
}
|
[
25,
26,
28,
30,
35
] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import re
if __name__ == '__main__':
    category = re.compile('\[\[Category\:.*\]\]')  # . matches any character except a newline
for line in open(sys.argv[1]):
        if category.search(line) is not None:  # the comparison must use 'is'
print line.strip()
|
normal
|
{
"blob_id": "14b6dc403be76abef5fde2cca5d773c88faa4b40",
"index": 6083,
"step-1": "#!usr/bin/python\n#--*--coding:utf-8--*--\n\nimport sys\nimport re\n\nif __name__ == '__main__':\n category = re.compile('\\[\\[Category\\:.*\\]\\]')#.は改行以外の任意の文字列にマッチ\n for line in open(sys.argv[1]):\n if category.search(line) is not None:#比較にはisを用いなければならない\n print line.strip()",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
def getRegionClass(image_path, data_id, imgName):
region_class = ['nosmoke_background', 'nosmoke_face', 'nosmoke_suspect',
'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']
label_class = ['nosmoke_bg', 'nosmoke_face', 'nosmoke_susp',
'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']
select_class = None
for class_id in range(len(region_class)):
cur_class = region_class[class_id]
cur_label_class = label_class[class_id]
check_file_name = os.path.join(image_path, data_id, cur_class, imgName)
if os.path.isfile(check_file_name):
select_class = cur_label_class
break
return select_class
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def getRegionClass(image_path, data_id, imgName):
region_class = ['nosmoke_background', 'nosmoke_face', 'nosmoke_suspect',
'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']
label_class = ['nosmoke_bg', 'nosmoke_face', 'nosmoke_susp',
'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']
select_class = None
for class_id in range(len(region_class)):
cur_class = region_class[class_id]
cur_label_class = label_class[class_id]
check_file_name = os.path.join(image_path, data_id, cur_class, imgName)
if os.path.isfile(check_file_name):
select_class = cur_label_class
break
return select_class
def add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir):
if not os.path.exists(dst_json_dir):
os.makedirs(dst_json_dir)
smoke_hand_num, smoke_nohand_num, smoke_hard_num = 0, 0, 0
(nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num
) = 0, 0, 0, 0
for json_file_name in glob.glob(org_json_dir + '/*.json'):
json_file = open(json_file_name, 'r')
base_file_id = os.path.basename(json_file_name)[:-5]
print(base_file_id + '.json')
json_lines = json_file.read().splitlines()
dst_json_lines = []
new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +
'.json', 'w', 'utf-8')
new_json_file.close()
new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +
'.json', 'a+', 'utf-8')
for line in json_lines:
if line[0] == '#':
new_json_file.write(line + '\n')
continue
js = json.loads(line, object_pairs_hook=OrderedDict)
imgName = js['image_key']
select_class = getRegionClass(done_root_dir, base_file_id, imgName)
if select_class == None:
new_json_file.write(line + '\n')
continue
new_common_box = {}
new_attrs = {}
new_attrs['ignore'] = 'no'
new_attrs['type'] = 'smoke_region'
new_attrs['class'] = select_class
new_common_box['attrs'] = new_attrs
if select_class == 'smoke_hard':
new_attrs['ignore'] = 'yes'
if select_class == 'smoke_hand':
smoke_hand_num += 1
elif select_class == 'smoke_nohand':
smoke_nohand_num += 1
elif select_class == 'smoke_hard':
smoke_hard_num += 1
elif select_class == 'nosmoke_bg':
nosmoke_bg_num += 1
elif select_class == 'nosmoke_face':
nosmoke_face_num += 1
elif select_class == 'nosmoke_susp':
nosmoke_susp_num += 1
elif select_class == 'nosmoke_cover':
nosmoke_cover_num += 1
else:
print('Invalid smoke class.', select_class)
if 'common_box' in js:
js['common_box'].append(new_common_box)
else:
js['common_box'] = [new_common_box]
new_js_line = json.dumps(js) + '\n'
new_json_file.write(new_js_line)
new_json_file.close()
print('write ' + base_file_id + '.json')
print('add_common_box_smoke_region done.')
print('smoke_hand:%d, smoke_nohand:%d, smoke_hard:%d' % (smoke_hand_num,
smoke_nohand_num, smoke_hard_num))
print(
'nosmoke_bg:%d, nosmoke_face:%d, nosmoke_susp:%d, nosmoke_cover:%d' %
(nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num)
)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def getRegionClass(image_path, data_id, imgName):
region_class = ['nosmoke_background', 'nosmoke_face', 'nosmoke_suspect',
'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']
label_class = ['nosmoke_bg', 'nosmoke_face', 'nosmoke_susp',
'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']
select_class = None
for class_id in range(len(region_class)):
cur_class = region_class[class_id]
cur_label_class = label_class[class_id]
check_file_name = os.path.join(image_path, data_id, cur_class, imgName)
if os.path.isfile(check_file_name):
select_class = cur_label_class
break
return select_class
def add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir):
if not os.path.exists(dst_json_dir):
os.makedirs(dst_json_dir)
smoke_hand_num, smoke_nohand_num, smoke_hard_num = 0, 0, 0
(nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num
) = 0, 0, 0, 0
for json_file_name in glob.glob(org_json_dir + '/*.json'):
json_file = open(json_file_name, 'r')
base_file_id = os.path.basename(json_file_name)[:-5]
print(base_file_id + '.json')
json_lines = json_file.read().splitlines()
dst_json_lines = []
new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +
'.json', 'w', 'utf-8')
new_json_file.close()
new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +
'.json', 'a+', 'utf-8')
for line in json_lines:
if line[0] == '#':
new_json_file.write(line + '\n')
continue
js = json.loads(line, object_pairs_hook=OrderedDict)
imgName = js['image_key']
select_class = getRegionClass(done_root_dir, base_file_id, imgName)
if select_class == None:
new_json_file.write(line + '\n')
continue
new_common_box = {}
new_attrs = {}
new_attrs['ignore'] = 'no'
new_attrs['type'] = 'smoke_region'
new_attrs['class'] = select_class
new_common_box['attrs'] = new_attrs
if select_class == 'smoke_hard':
new_attrs['ignore'] = 'yes'
if select_class == 'smoke_hand':
smoke_hand_num += 1
elif select_class == 'smoke_nohand':
smoke_nohand_num += 1
elif select_class == 'smoke_hard':
smoke_hard_num += 1
elif select_class == 'nosmoke_bg':
nosmoke_bg_num += 1
elif select_class == 'nosmoke_face':
nosmoke_face_num += 1
elif select_class == 'nosmoke_susp':
nosmoke_susp_num += 1
elif select_class == 'nosmoke_cover':
nosmoke_cover_num += 1
else:
print('Invalid smoke class.', select_class)
if 'common_box' in js:
js['common_box'].append(new_common_box)
else:
js['common_box'] = [new_common_box]
new_js_line = json.dumps(js) + '\n'
new_json_file.write(new_js_line)
new_json_file.close()
print('write ' + base_file_id + '.json')
print('add_common_box_smoke_region done.')
print('smoke_hand:%d, smoke_nohand:%d, smoke_hard:%d' % (smoke_hand_num,
smoke_nohand_num, smoke_hard_num))
print(
'nosmoke_bg:%d, nosmoke_face:%d, nosmoke_susp:%d, nosmoke_cover:%d' %
(nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num)
)
if __name__ == '__main__':
    if len(sys.argv) < 4:
        print(
            'usage: add_common_box_smoke_region.py org_json_dir dst_json_dir done_root_dir'
)
exit()
org_json_dir = sys.argv[1]
dst_json_dir = sys.argv[2]
done_root_dir = sys.argv[3]
add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir)
<|reserved_special_token_1|>
import os
import sys
import glob
import shutil
import json
import codecs
from collections import OrderedDict
def getRegionClass(image_path, data_id, imgName):
region_class = ['nosmoke_background', 'nosmoke_face', 'nosmoke_suspect',
'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']
label_class = ['nosmoke_bg', 'nosmoke_face', 'nosmoke_susp',
'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']
select_class = None
for class_id in range(len(region_class)):
cur_class = region_class[class_id]
cur_label_class = label_class[class_id]
check_file_name = os.path.join(image_path, data_id, cur_class, imgName)
if os.path.isfile(check_file_name):
select_class = cur_label_class
break
return select_class
def add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir):
if not os.path.exists(dst_json_dir):
os.makedirs(dst_json_dir)
smoke_hand_num, smoke_nohand_num, smoke_hard_num = 0, 0, 0
(nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num
) = 0, 0, 0, 0
for json_file_name in glob.glob(org_json_dir + '/*.json'):
json_file = open(json_file_name, 'r')
base_file_id = os.path.basename(json_file_name)[:-5]
print(base_file_id + '.json')
json_lines = json_file.read().splitlines()
dst_json_lines = []
new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +
'.json', 'w', 'utf-8')
new_json_file.close()
new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +
'.json', 'a+', 'utf-8')
for line in json_lines:
if line[0] == '#':
new_json_file.write(line + '\n')
continue
js = json.loads(line, object_pairs_hook=OrderedDict)
imgName = js['image_key']
select_class = getRegionClass(done_root_dir, base_file_id, imgName)
if select_class == None:
new_json_file.write(line + '\n')
continue
new_common_box = {}
new_attrs = {}
new_attrs['ignore'] = 'no'
new_attrs['type'] = 'smoke_region'
new_attrs['class'] = select_class
new_common_box['attrs'] = new_attrs
if select_class == 'smoke_hard':
new_attrs['ignore'] = 'yes'
if select_class == 'smoke_hand':
smoke_hand_num += 1
elif select_class == 'smoke_nohand':
smoke_nohand_num += 1
elif select_class == 'smoke_hard':
smoke_hard_num += 1
elif select_class == 'nosmoke_bg':
nosmoke_bg_num += 1
elif select_class == 'nosmoke_face':
nosmoke_face_num += 1
elif select_class == 'nosmoke_susp':
nosmoke_susp_num += 1
elif select_class == 'nosmoke_cover':
nosmoke_cover_num += 1
else:
print('Invalid smoke class.', select_class)
if 'common_box' in js:
js['common_box'].append(new_common_box)
else:
js['common_box'] = [new_common_box]
new_js_line = json.dumps(js) + '\n'
new_json_file.write(new_js_line)
new_json_file.close()
print('write ' + base_file_id + '.json')
print('add_common_box_smoke_region done.')
print('smoke_hand:%d, smoke_nohand:%d, smoke_hard:%d' % (smoke_hand_num,
smoke_nohand_num, smoke_hard_num))
print(
'nosmoke_bg:%d, nosmoke_face:%d, nosmoke_susp:%d, nosmoke_cover:%d' %
(nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num)
)
if __name__ == '__main__':
    if len(sys.argv) < 4:
        print(
            'usage: add_common_box_smoke_region.py org_json_dir dst_json_dir done_root_dir'
)
exit()
org_json_dir = sys.argv[1]
dst_json_dir = sys.argv[2]
done_root_dir = sys.argv[3]
add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir)
<|reserved_special_token_1|>
import os
import sys
import glob
import shutil
import json
import codecs
from collections import OrderedDict
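# Classify an image by checking which per-class review folder under
# done_root_dir/<data_id>/ contains a file with the same name.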
def getRegionClass(image_path, data_id, imgName):
region_class = ['nosmoke_background', 'nosmoke_face', 'nosmoke_suspect', 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']
label_class = ['nosmoke_bg', 'nosmoke_face', 'nosmoke_susp', 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']
select_class = None
for class_id in range(len(region_class)):
cur_class = region_class[class_id]
cur_label_class = label_class[class_id]
check_file_name = os.path.join(image_path, data_id, cur_class, imgName)
if os.path.isfile(check_file_name):
select_class = cur_label_class
#print check_file_name
break
return select_class
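# For every annotation JSON under org_json_dir, append a smoke_region
# common_box (class taken from getRegionClass) and write to dst_json_dir.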
def add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir):
if not os.path.exists(dst_json_dir):
os.makedirs(dst_json_dir)
smoke_hand_num, smoke_nohand_num, smoke_hard_num = 0, 0, 0
nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num = 0, 0, 0, 0
for json_file_name in glob.glob(org_json_dir + '/*.json'):
json_file = open(json_file_name, 'r')
base_file_id = os.path.basename(json_file_name)[:-5]
print(base_file_id + '.json')
json_lines = json_file.read().splitlines()
dst_json_lines = []
new_json_file = codecs.open(dst_json_dir + '/' + base_file_id + '.json', "w", "utf-8")
new_json_file.close()
new_json_file = codecs.open(dst_json_dir + '/' + base_file_id + '.json', "a+", 'utf-8')
for line in json_lines:
if line[0] == '#':
new_json_file.write(line + '\n')
continue
js = json.loads(line, object_pairs_hook=OrderedDict)
#new_js_line = json.dumps(js) + "\n"
#new_json_file.write(new_js_line)
#continue
imgName = js["image_key"]
select_class = getRegionClass(done_root_dir, base_file_id, imgName)
if select_class == None:
new_json_file.write(line + '\n') #
#print('Not Found: ', done_root_dir, base_file_id, imgName)
continue
#print select_class
new_common_box = {}
new_attrs = {}
new_attrs['ignore'] = 'no'
new_attrs['type'] = 'smoke_region'
new_attrs['class'] = select_class
new_common_box['attrs'] = new_attrs
if select_class == 'smoke_hard':
new_attrs['ignore'] = 'yes'
# statistic
if select_class == 'smoke_hand':
smoke_hand_num += 1
elif select_class == 'smoke_nohand':
smoke_nohand_num += 1
elif select_class == 'smoke_hard':
smoke_hard_num += 1
elif select_class == 'nosmoke_bg':
nosmoke_bg_num += 1
elif select_class == 'nosmoke_face':
nosmoke_face_num += 1
elif select_class == 'nosmoke_susp':
nosmoke_susp_num += 1
elif select_class == 'nosmoke_cover':
nosmoke_cover_num += 1
else:
print('Invalid smoke class.', select_class)
# common box, like phone, hand
if 'common_box' in js:
js['common_box'].append(new_common_box)
else:
js['common_box'] = [new_common_box]
new_js_line = json.dumps(js) + "\n"
new_json_file.write(new_js_line)
new_json_file.close()
print('write ' + base_file_id + '.json')
print('add_common_box_smoke_region done.')
print('smoke_hand:%d, smoke_nohand:%d, smoke_hard:%d'%(smoke_hand_num, smoke_nohand_num, smoke_hard_num))
print('nosmoke_bg:%d, nosmoke_face:%d, nosmoke_susp:%d, nosmoke_cover:%d'%(nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num))
if __name__ == '__main__':
    if len(sys.argv) < 4:
        print('usage: add_common_box_smoke_region.py org_json_dir dst_json_dir done_root_dir')
exit()
org_json_dir = sys.argv[1]
dst_json_dir = sys.argv[2]
done_root_dir = sys.argv[3]
add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir)
|
flexible
|
{
"blob_id": "75833617996549167fa157ff78cc1a11f870784f",
"index": 8639,
"step-1": "<mask token>\n\n\ndef getRegionClass(image_path, data_id, imgName):\n region_class = ['nosmoke_background', 'nosmoke_face', 'nosmoke_suspect',\n 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']\n label_class = ['nosmoke_bg', 'nosmoke_face', 'nosmoke_susp',\n 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']\n select_class = None\n for class_id in range(len(region_class)):\n cur_class = region_class[class_id]\n cur_label_class = label_class[class_id]\n check_file_name = os.path.join(image_path, data_id, cur_class, imgName)\n if os.path.isfile(check_file_name):\n select_class = cur_label_class\n break\n return select_class\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef getRegionClass(image_path, data_id, imgName):\n region_class = ['nosmoke_background', 'nosmoke_face', 'nosmoke_suspect',\n 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']\n label_class = ['nosmoke_bg', 'nosmoke_face', 'nosmoke_susp',\n 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']\n select_class = None\n for class_id in range(len(region_class)):\n cur_class = region_class[class_id]\n cur_label_class = label_class[class_id]\n check_file_name = os.path.join(image_path, data_id, cur_class, imgName)\n if os.path.isfile(check_file_name):\n select_class = cur_label_class\n break\n return select_class\n\n\ndef add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir):\n if not os.path.exists(dst_json_dir):\n os.makedirs(dst_json_dir)\n smoke_hand_num, smoke_nohand_num, smoke_hard_num = 0, 0, 0\n (nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num\n ) = 0, 0, 0, 0\n for json_file_name in glob.glob(org_json_dir + '/*.json'):\n json_file = open(json_file_name, 'r')\n base_file_id = os.path.basename(json_file_name)[:-5]\n print(base_file_id + '.json')\n json_lines = json_file.read().splitlines()\n dst_json_lines = []\n new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +\n '.json', 'w', 'utf-8')\n new_json_file.close()\n new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +\n '.json', 'a+', 'utf-8')\n for line in json_lines:\n if line[0] == '#':\n new_json_file.write(line + '\\n')\n continue\n js = json.loads(line, object_pairs_hook=OrderedDict)\n imgName = js['image_key']\n select_class = getRegionClass(done_root_dir, base_file_id, imgName)\n if select_class == None:\n new_json_file.write(line + '\\n')\n continue\n new_common_box = {}\n new_attrs = {}\n new_attrs['ignore'] = 'no'\n new_attrs['type'] = 'smoke_region'\n new_attrs['class'] = select_class\n new_common_box['attrs'] = new_attrs\n if select_class == 'smoke_hard':\n new_attrs['ignore'] = 'yes'\n if select_class == 'smoke_hand':\n smoke_hand_num += 1\n elif select_class == 'smoke_nohand':\n smoke_nohand_num += 1\n elif select_class == 'smoke_hard':\n smoke_hard_num += 1\n elif select_class == 'nosmoke_bg':\n nosmoke_bg_num += 1\n elif select_class == 'nosmoke_face':\n nosmoke_face_num += 1\n elif select_class == 'nosmoke_susp':\n nosmoke_susp_num += 1\n elif select_class == 'nosmoke_cover':\n nosmoke_cover_num += 1\n else:\n print('Invalid smoke class.', select_class)\n if 'common_box' in js:\n js['common_box'].append(new_common_box)\n else:\n js['common_box'] = [new_common_box]\n new_js_line = json.dumps(js) + '\\n'\n new_json_file.write(new_js_line)\n new_json_file.close()\n print('write ' + base_file_id + '.json')\n print('add_common_box_smoke_region done.')\n print('smoke_hand:%d, smoke_nohand:%d, smoke_hard:%d' % (smoke_hand_num,\n smoke_nohand_num, smoke_hard_num))\n print(\n 'nosmoke_bg:%d, nosmoke_face:%d, nosmoke_susp:%d, nosmoke_cover:%d' %\n (nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num)\n )\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef getRegionClass(image_path, data_id, imgName):\n region_class = ['nosmoke_background', 'nosmoke_face', 'nosmoke_suspect',\n 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']\n label_class = ['nosmoke_bg', 'nosmoke_face', 'nosmoke_susp',\n 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']\n select_class = None\n for class_id in range(len(region_class)):\n cur_class = region_class[class_id]\n cur_label_class = label_class[class_id]\n check_file_name = os.path.join(image_path, data_id, cur_class, imgName)\n if os.path.isfile(check_file_name):\n select_class = cur_label_class\n break\n return select_class\n\n\ndef add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir):\n if not os.path.exists(dst_json_dir):\n os.makedirs(dst_json_dir)\n smoke_hand_num, smoke_nohand_num, smoke_hard_num = 0, 0, 0\n (nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num\n ) = 0, 0, 0, 0\n for json_file_name in glob.glob(org_json_dir + '/*.json'):\n json_file = open(json_file_name, 'r')\n base_file_id = os.path.basename(json_file_name)[:-5]\n print(base_file_id + '.json')\n json_lines = json_file.read().splitlines()\n dst_json_lines = []\n new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +\n '.json', 'w', 'utf-8')\n new_json_file.close()\n new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +\n '.json', 'a+', 'utf-8')\n for line in json_lines:\n if line[0] == '#':\n new_json_file.write(line + '\\n')\n continue\n js = json.loads(line, object_pairs_hook=OrderedDict)\n imgName = js['image_key']\n select_class = getRegionClass(done_root_dir, base_file_id, imgName)\n if select_class == None:\n new_json_file.write(line + '\\n')\n continue\n new_common_box = {}\n new_attrs = {}\n new_attrs['ignore'] = 'no'\n new_attrs['type'] = 'smoke_region'\n new_attrs['class'] = select_class\n new_common_box['attrs'] = new_attrs\n if select_class == 'smoke_hard':\n new_attrs['ignore'] = 'yes'\n if select_class == 'smoke_hand':\n smoke_hand_num += 1\n elif select_class == 'smoke_nohand':\n smoke_nohand_num += 1\n elif select_class == 'smoke_hard':\n smoke_hard_num += 1\n elif select_class == 'nosmoke_bg':\n nosmoke_bg_num += 1\n elif select_class == 'nosmoke_face':\n nosmoke_face_num += 1\n elif select_class == 'nosmoke_susp':\n nosmoke_susp_num += 1\n elif select_class == 'nosmoke_cover':\n nosmoke_cover_num += 1\n else:\n print('Invalid smoke class.', select_class)\n if 'common_box' in js:\n js['common_box'].append(new_common_box)\n else:\n js['common_box'] = [new_common_box]\n new_js_line = json.dumps(js) + '\\n'\n new_json_file.write(new_js_line)\n new_json_file.close()\n print('write ' + base_file_id + '.json')\n print('add_common_box_smoke_region done.')\n print('smoke_hand:%d, smoke_nohand:%d, smoke_hard:%d' % (smoke_hand_num,\n smoke_nohand_num, smoke_hard_num))\n print(\n 'nosmoke_bg:%d, nosmoke_face:%d, nosmoke_susp:%d, nosmoke_cover:%d' %\n (nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num)\n )\n\n\nif __name__ == '__main__':\n if len(sys.argv) < 2:\n print(\n 'useage: add_common_box_smoke_region.py org_json_dir dst_json_dir done_root_dir'\n )\n exit()\n org_json_dir = sys.argv[1]\n dst_json_dir = sys.argv[2]\n done_root_dir = sys.argv[3]\n add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir)\n",
"step-4": "import os\nimport sys\nimport glob\nimport shutil\nimport json\nimport codecs\nfrom collections import OrderedDict\n\n\ndef getRegionClass(image_path, data_id, imgName):\n region_class = ['nosmoke_background', 'nosmoke_face', 'nosmoke_suspect',\n 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']\n label_class = ['nosmoke_bg', 'nosmoke_face', 'nosmoke_susp',\n 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']\n select_class = None\n for class_id in range(len(region_class)):\n cur_class = region_class[class_id]\n cur_label_class = label_class[class_id]\n check_file_name = os.path.join(image_path, data_id, cur_class, imgName)\n if os.path.isfile(check_file_name):\n select_class = cur_label_class\n break\n return select_class\n\n\ndef add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir):\n if not os.path.exists(dst_json_dir):\n os.makedirs(dst_json_dir)\n smoke_hand_num, smoke_nohand_num, smoke_hard_num = 0, 0, 0\n (nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num\n ) = 0, 0, 0, 0\n for json_file_name in glob.glob(org_json_dir + '/*.json'):\n json_file = open(json_file_name, 'r')\n base_file_id = os.path.basename(json_file_name)[:-5]\n print(base_file_id + '.json')\n json_lines = json_file.read().splitlines()\n dst_json_lines = []\n new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +\n '.json', 'w', 'utf-8')\n new_json_file.close()\n new_json_file = codecs.open(dst_json_dir + '/' + base_file_id +\n '.json', 'a+', 'utf-8')\n for line in json_lines:\n if line[0] == '#':\n new_json_file.write(line + '\\n')\n continue\n js = json.loads(line, object_pairs_hook=OrderedDict)\n imgName = js['image_key']\n select_class = getRegionClass(done_root_dir, base_file_id, imgName)\n if select_class == None:\n new_json_file.write(line + '\\n')\n continue\n new_common_box = {}\n new_attrs = {}\n new_attrs['ignore'] = 'no'\n new_attrs['type'] = 'smoke_region'\n new_attrs['class'] = select_class\n new_common_box['attrs'] = new_attrs\n if select_class == 'smoke_hard':\n new_attrs['ignore'] = 'yes'\n if select_class == 'smoke_hand':\n smoke_hand_num += 1\n elif select_class == 'smoke_nohand':\n smoke_nohand_num += 1\n elif select_class == 'smoke_hard':\n smoke_hard_num += 1\n elif select_class == 'nosmoke_bg':\n nosmoke_bg_num += 1\n elif select_class == 'nosmoke_face':\n nosmoke_face_num += 1\n elif select_class == 'nosmoke_susp':\n nosmoke_susp_num += 1\n elif select_class == 'nosmoke_cover':\n nosmoke_cover_num += 1\n else:\n print('Invalid smoke class.', select_class)\n if 'common_box' in js:\n js['common_box'].append(new_common_box)\n else:\n js['common_box'] = [new_common_box]\n new_js_line = json.dumps(js) + '\\n'\n new_json_file.write(new_js_line)\n new_json_file.close()\n print('write ' + base_file_id + '.json')\n print('add_common_box_smoke_region done.')\n print('smoke_hand:%d, smoke_nohand:%d, smoke_hard:%d' % (smoke_hand_num,\n smoke_nohand_num, smoke_hard_num))\n print(\n 'nosmoke_bg:%d, nosmoke_face:%d, nosmoke_susp:%d, nosmoke_cover:%d' %\n (nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num)\n )\n\n\nif __name__ == '__main__':\n if len(sys.argv) < 2:\n print(\n 'useage: add_common_box_smoke_region.py org_json_dir dst_json_dir done_root_dir'\n )\n exit()\n org_json_dir = sys.argv[1]\n dst_json_dir = sys.argv[2]\n done_root_dir = sys.argv[3]\n add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir)\n",
"step-5": "import os\nimport sys\nimport glob\nimport shutil\nimport json\nimport codecs\nfrom collections import OrderedDict\n\ndef getRegionClass(image_path, data_id, imgName):\n region_class = ['nosmoke_background', 'nosmoke_face', 'nosmoke_suspect', 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']\n label_class = ['nosmoke_bg', 'nosmoke_face', 'nosmoke_susp', 'nosmoke_cover', 'smoke_hand', 'smoke_nohand', 'smoke_hard']\n select_class = None\n for class_id in range(len(region_class)):\n cur_class = region_class[class_id]\n cur_label_class = label_class[class_id]\n check_file_name = os.path.join(image_path, data_id, cur_class, imgName)\n if os.path.isfile(check_file_name):\n select_class = cur_label_class\n #print check_file_name\n break\n return select_class\n\ndef add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir):\n if not os.path.exists(dst_json_dir):\n os.makedirs(dst_json_dir)\n \n smoke_hand_num, smoke_nohand_num, smoke_hard_num = 0, 0, 0\n nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num = 0, 0, 0, 0\n for json_file_name in glob.glob(org_json_dir + '/*.json'):\n json_file = open(json_file_name, 'r')\n base_file_id = os.path.basename(json_file_name)[:-5]\n print(base_file_id + '.json')\n \n json_lines = json_file.read().splitlines()\n dst_json_lines = []\n \n new_json_file = codecs.open(dst_json_dir + '/' + base_file_id + '.json', \"w\", \"utf-8\")\n new_json_file.close()\n new_json_file = codecs.open(dst_json_dir + '/' + base_file_id + '.json', \"a+\", 'utf-8')\n for line in json_lines:\n if line[0] == '#':\n new_json_file.write(line + '\\n')\n continue\n js = json.loads(line, object_pairs_hook=OrderedDict)\n \n #new_js_line = json.dumps(js) + \"\\n\"\n #new_json_file.write(new_js_line)\n #continue\n \n imgName = js[\"image_key\"]\n select_class = getRegionClass(done_root_dir, base_file_id, imgName)\n if select_class == None:\n new_json_file.write(line + '\\n') #\n #print('Not Found: ', done_root_dir, base_file_id, imgName)\n continue\n #print select_class\n new_common_box = {}\n new_attrs = {}\n new_attrs['ignore'] = 'no'\n new_attrs['type'] = 'smoke_region'\n new_attrs['class'] = select_class\n new_common_box['attrs'] = new_attrs\n if select_class == 'smoke_hard':\n new_attrs['ignore'] = 'yes'\n \n # statistic\n if select_class == 'smoke_hand':\n smoke_hand_num += 1\n elif select_class == 'smoke_nohand':\n smoke_nohand_num += 1\n elif select_class == 'smoke_hard':\n smoke_hard_num += 1\n elif select_class == 'nosmoke_bg':\n nosmoke_bg_num += 1\n elif select_class == 'nosmoke_face':\n nosmoke_face_num += 1\n elif select_class == 'nosmoke_susp':\n nosmoke_susp_num += 1\n elif select_class == 'nosmoke_cover':\n nosmoke_cover_num += 1\n else:\n print('Invalid smoke class.', select_class)\n \n # common box, like phone, hand\n if 'common_box' in js:\n js['common_box'].append(new_common_box)\n else:\n js['common_box'] = [new_common_box]\n new_js_line = json.dumps(js) + \"\\n\"\n new_json_file.write(new_js_line)\n new_json_file.close()\n print('write ' + base_file_id + '.json')\n print('add_common_box_smoke_region done.')\n print('smoke_hand:%d, smoke_nohand:%d, smoke_hard:%d'%(smoke_hand_num, smoke_nohand_num, smoke_hard_num))\n print('nosmoke_bg:%d, nosmoke_face:%d, nosmoke_susp:%d, nosmoke_cover:%d'%(nosmoke_bg_num, nosmoke_face_num, nosmoke_susp_num, nosmoke_cover_num))\n \nif __name__ == '__main__':\n if len(sys.argv) < 2:\n print('useage: add_common_box_smoke_region.py org_json_dir dst_json_dir done_root_dir')\n exit()\n 
org_json_dir = sys.argv[1]\n dst_json_dir = sys.argv[2]\n done_root_dir = sys.argv[3]\n add_common_box_smoke_region(org_json_dir, dst_json_dir, done_root_dir)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def convert(decimal_num):
roman = {(1000): 'M', (900): 'CM', (500): 'D', (400): 'CD', (100): 'C',
(90): 'XC', (50): 'L', (40): 'XL', (10): 'X', (9): 'IX', (5): 'V',
(4): 'IV', (1): 'I'}
num_to_roman = ''
for i in roman.keys():
num_to_roman += roman[i] * (decimal_num // i)
decimal_num %= i
return num_to_roman
@app.route('/', methods=['POST', 'GET'])
def main_post():
if request.method == 'POST':
alpha = request.form['number']
if not alpha.isdecimal():
return render_template('index.html', not_valid=True,
developer_name='Pablo')
number = int(alpha)
if not 0 < number < 4000:
return render_template('index.html', not_valid=True,
developer_name='Pablo')
return render_template('result.html', developer_name='Pablo',
number_decimal=number, number_roman=convert(number))
else:
return render_template('index.html', not_valid=False,
            developer_name='Pablo')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def convert(decimal_num):
roman = {(1000): 'M', (900): 'CM', (500): 'D', (400): 'CD', (100): 'C',
(90): 'XC', (50): 'L', (40): 'XL', (10): 'X', (9): 'IX', (5): 'V',
(4): 'IV', (1): 'I'}
num_to_roman = ''
for i in roman.keys():
num_to_roman += roman[i] * (decimal_num // i)
decimal_num %= i
return num_to_roman
@app.route('/', methods=['POST', 'GET'])
def main_post():
if request.method == 'POST':
alpha = request.form['number']
if not alpha.isdecimal():
return render_template('index.html', not_valid=True,
developer_name='Pablo')
number = int(alpha)
if not 0 < number < 4000:
return render_template('index.html', not_valid=True,
developer_name='Pablo')
return render_template('result.html', developer_name='Pablo',
number_decimal=number, number_roman=convert(number))
else:
return render_template('index.html', not_valid=False,
            developer_name='Pablo')
if __name__ == '__main__':
app.run(host='0.0.0.0', port=80)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app = Flask(__name__)
def convert(decimal_num):
roman = {(1000): 'M', (900): 'CM', (500): 'D', (400): 'CD', (100): 'C',
(90): 'XC', (50): 'L', (40): 'XL', (10): 'X', (9): 'IX', (5): 'V',
(4): 'IV', (1): 'I'}
num_to_roman = ''
for i in roman.keys():
num_to_roman += roman[i] * (decimal_num // i)
decimal_num %= i
return num_to_roman
@app.route('/', methods=['POST', 'GET'])
def main_post():
if request.method == 'POST':
alpha = request.form['number']
if not alpha.isdecimal():
return render_template('index.html', not_valid=True,
developer_name='Pablo')
number = int(alpha)
if not 0 < number < 4000:
return render_template('index.html', not_valid=True,
developer_name='Pablo')
return render_template('result.html', developer_name='Pablo',
number_decimal=number, number_roman=convert(number))
else:
return render_template('index.html', not_valid=False,
            developer_name='Pablo')
if __name__ == '__main__':
app.run(host='0.0.0.0', port=80)
<|reserved_special_token_1|>
from flask import Flask, render_template, request
app = Flask(__name__)
def convert(decimal_num):
roman = {(1000): 'M', (900): 'CM', (500): 'D', (400): 'CD', (100): 'C',
(90): 'XC', (50): 'L', (40): 'XL', (10): 'X', (9): 'IX', (5): 'V',
(4): 'IV', (1): 'I'}
num_to_roman = ''
for i in roman.keys():
num_to_roman += roman[i] * (decimal_num // i)
decimal_num %= i
return num_to_roman
@app.route('/', methods=['POST', 'GET'])
def main_post():
if request.method == 'POST':
alpha = request.form['number']
if not alpha.isdecimal():
return render_template('index.html', not_valid=True,
developer_name='Pablo')
number = int(alpha)
if not 0 < number < 4000:
return render_template('index.html', not_valid=True,
developer_name='Pablo')
return render_template('result.html', developer_name='Pablo',
number_decimal=number, number_roman=convert(number))
else:
return render_template('index.html', not_valid=False,
            developer_name='Pablo')
if __name__ == '__main__':
app.run(host='0.0.0.0', port=80)
<|reserved_special_token_1|>
from flask import Flask, render_template, request
app = Flask(__name__)
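# Greedy conversion: the dict keys are ordered largest-first, so each
# Roman value is taken as many times as it still fits into the number.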
def convert(decimal_num):
roman = {1000:'M', 900:'CM', 500:'D', 400:'CD', 100:'C', 90:'XC', 50:'L', 40:'XL', 10:'X', 9:'IX', 5:'V', 4:'IV', 1:'I'}
num_to_roman = ''
for i in roman.keys():
num_to_roman += roman[i]*(decimal_num//i)
decimal_num %= i
return num_to_roman
# Use either the version above or this one
#def convert_to_roman(num):
# roman = ['M','CM','D','CD','C','XC','L','XL','X','IX','V','IV','I']
# number = [1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1]
# romanvalue = ''
# for i,d in enumerate(number):
# while (num >= d):
# num -= d
# romanvalue += roman[i]
# return romanvalue
@app.route('/', methods=['POST','GET'])
def main_post():
if request.method == 'POST':
        alpha = request.form['number'] # form values come in as a dictionary, so we grab the value by its key with []
if not alpha.isdecimal():
return render_template('index.html', not_valid=True,developer_name='Pablo')
number=int(alpha)
if not 0<number<4000:
return render_template('index.html', not_valid=True,developer_name='Pablo')
return render_template('result.html', developer_name='Pablo', number_decimal=number,number_roman=convert(number))
else:
        return render_template('index.html',not_valid = False, developer_name='Pablo')
if __name__=='__main__':
#app.run(debug=True)
app.run(host='0.0.0.0',port=80)
|
flexible
|
{
"blob_id": "7025cc896035c59e0bbb7943493b6ca24fd9e6ca",
"index": 9429,
"step-1": "<mask token>\n\n\ndef convert(decimal_num):\n roman = {(1000): 'M', (900): 'CM', (500): 'D', (400): 'CD', (100): 'C',\n (90): 'XC', (50): 'L', (40): 'XL', (10): 'X', (9): 'IX', (5): 'V',\n (4): 'IV', (1): 'I'}\n num_to_roman = ''\n for i in roman.keys():\n num_to_roman += roman[i] * (decimal_num // i)\n decimal_num %= i\n return num_to_roman\n\n\[email protected]('/', methods=['POST', 'GET'])\ndef main_post():\n if request.method == 'POST':\n alpha = request.form['number']\n if not alpha.isdecimal():\n return render_template('index.html', not_valid=True,\n developer_name='Pablo')\n number = int(alpha)\n if not 0 < number < 4000:\n return render_template('index.html', not_valid=True,\n developer_name='Pablo')\n return render_template('result.html', developer_name='Pablo',\n number_decimal=number, number_roman=convert(number))\n else:\n return render_template('index.html', not_valid=False,\n develeoper_name='Pablo')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef convert(decimal_num):\n roman = {(1000): 'M', (900): 'CM', (500): 'D', (400): 'CD', (100): 'C',\n (90): 'XC', (50): 'L', (40): 'XL', (10): 'X', (9): 'IX', (5): 'V',\n (4): 'IV', (1): 'I'}\n num_to_roman = ''\n for i in roman.keys():\n num_to_roman += roman[i] * (decimal_num // i)\n decimal_num %= i\n return num_to_roman\n\n\[email protected]('/', methods=['POST', 'GET'])\ndef main_post():\n if request.method == 'POST':\n alpha = request.form['number']\n if not alpha.isdecimal():\n return render_template('index.html', not_valid=True,\n developer_name='Pablo')\n number = int(alpha)\n if not 0 < number < 4000:\n return render_template('index.html', not_valid=True,\n developer_name='Pablo')\n return render_template('result.html', developer_name='Pablo',\n number_decimal=number, number_roman=convert(number))\n else:\n return render_template('index.html', not_valid=False,\n develeoper_name='Pablo')\n\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=80)\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\ndef convert(decimal_num):\n roman = {(1000): 'M', (900): 'CM', (500): 'D', (400): 'CD', (100): 'C',\n (90): 'XC', (50): 'L', (40): 'XL', (10): 'X', (9): 'IX', (5): 'V',\n (4): 'IV', (1): 'I'}\n num_to_roman = ''\n for i in roman.keys():\n num_to_roman += roman[i] * (decimal_num // i)\n decimal_num %= i\n return num_to_roman\n\n\[email protected]('/', methods=['POST', 'GET'])\ndef main_post():\n if request.method == 'POST':\n alpha = request.form['number']\n if not alpha.isdecimal():\n return render_template('index.html', not_valid=True,\n developer_name='Pablo')\n number = int(alpha)\n if not 0 < number < 4000:\n return render_template('index.html', not_valid=True,\n developer_name='Pablo')\n return render_template('result.html', developer_name='Pablo',\n number_decimal=number, number_roman=convert(number))\n else:\n return render_template('index.html', not_valid=False,\n develeoper_name='Pablo')\n\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=80)\n",
"step-4": "from flask import Flask, render_template, request\napp = Flask(__name__)\n\n\ndef convert(decimal_num):\n roman = {(1000): 'M', (900): 'CM', (500): 'D', (400): 'CD', (100): 'C',\n (90): 'XC', (50): 'L', (40): 'XL', (10): 'X', (9): 'IX', (5): 'V',\n (4): 'IV', (1): 'I'}\n num_to_roman = ''\n for i in roman.keys():\n num_to_roman += roman[i] * (decimal_num // i)\n decimal_num %= i\n return num_to_roman\n\n\[email protected]('/', methods=['POST', 'GET'])\ndef main_post():\n if request.method == 'POST':\n alpha = request.form['number']\n if not alpha.isdecimal():\n return render_template('index.html', not_valid=True,\n developer_name='Pablo')\n number = int(alpha)\n if not 0 < number < 4000:\n return render_template('index.html', not_valid=True,\n developer_name='Pablo')\n return render_template('result.html', developer_name='Pablo',\n number_decimal=number, number_roman=convert(number))\n else:\n return render_template('index.html', not_valid=False,\n develeoper_name='Pablo')\n\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=80)\n",
"step-5": "from flask import Flask, render_template, request\n\napp = Flask(__name__)\n\ndef convert(decimal_num):\n roman = {1000:'M', 900:'CM', 500:'D', 400:'CD', 100:'C', 90:'XC', 50:'L', 40:'XL', 10:'X', 9:'IX', 5:'V', 4:'IV', 1:'I'}\n num_to_roman = ''\n for i in roman.keys():\n num_to_roman += roman[i]*(decimal_num//i)\n decimal_num %= i \n return num_to_roman\n\n# Ister ustekini kullan ister bunu\n#def convert_to_roman(num):\n# roman = ['M','CM','D','CD','C','XC','L','XL','X','IX','V','IV','I']\n# number = [1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1]\n# romanvalue = ''\n# for i,d in enumerate(number):\n# while (num >= d): \n# num -= d\n# romanvalue += roman[i]\n# return romanvalue\n\[email protected]('/', methods=['POST','GET'])\ndef main_post():\n if request.method == 'POST':\n alpha = request.form['number'] # degerler dictionary olarak geliyor dedi o yuzden key i aliyoz [] ile\n if not alpha.isdecimal():\n return render_template('index.html', not_valid=True,developer_name='Pablo')\n number=int(alpha)\n if not 0<number<4000:\n return render_template('index.html', not_valid=True,developer_name='Pablo')\n return render_template('result.html', developer_name='Pablo', number_decimal=number,number_roman=convert(number))\n \n\n\n\n else:\n return render_template('index.html',not_valid = False, develeoper_name='Pablo') \n\n\n\n\n\n\n\n\nif __name__=='__main__':\n #app.run(debug=True)\n app.run(host='0.0.0.0',port=80)",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class Launcher:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class ShotTracker:
""" Graphical depiction of a projectile flight using a Circle """
def __init__(self, win, angle, velocity, height):
"""win is the GraphWin to display the shot, angle, velocity, and
height are initial projectile parameters.
"""
self.proj = Projectile(angle, velocity, height)
self.marker = Circle(Point(0, height), 3)
self.marker.setFill('red')
self.marker.setOutline('red')
self.marker.draw(win)
def update(self, dt):
""" Move the shot dt seconds farther along its flight """
self.proj.update(dt)
center = self.marker.getCenter()
dx = self.proj.getX() - center.getX()
dy = self.proj.getY() - center.getY()
self.marker.move(dx, dy)
def getX(self):
""" return the current x coordinate of the shot's center """
return self.proj.getX()
def getY(self):
""" return the current y coordinate of the shot's center """
return self.proj.getY()
def undraw(self):
""" undraw the shot """
self.marker.undraw()
class ProjectileApp:
def __init__(self):
self.win = GraphWin('Projectile Animation', 640, 480)
self.win.setCoords(-10, -10, 210, 155)
Line(Point(-10, 0), Point(210, 0)).draw(self.win)
for x in range(0, 210, 50):
Text(Point(x, -7), str(x)).draw(self.win)
Line(Point(x, 0), Point(x, 2)).draw(self.win)
self.launcher = Launcher(self.win)
self.shots = []
def updateShots(self, dt):
alive = []
for shot in self.shots:
shot.update(dt)
if shot.getY() >= 0 and shot.getX() < 210:
alive.append(shot)
else:
shot.undraw()
self.shots = alive
def run(self):
while True:
self.updateShots(1 / 30)
key = self.win.checkKey()
if key in ['q', 'Q']:
break
if key == 'Up':
self.launcher.adjAngle(5)
elif key == 'Down':
self.launcher.adjAngle(-5)
elif key == 'Right':
self.launcher.adjVel(5)
elif key == 'Left':
self.launcher.adjVel(-5)
elif key == 'f':
self.shots.append(self.launcher.fire())
update(30)
self.win.close()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Launcher:
def __init__(self, win):
"""Create inital launcher with angle 45 degrees and velocity 40
win is the GraphWin to draw the launcher in.
"""
base = Circle(Point(0, 0), 3)
base.setFill('red')
base.setOutline('red')
base.draw(win)
self.win = win
self.angle = radians(45.0)
self.vel = 40.0
self.arrow = Line(Point(0, 0), Point(0, 0)).draw(win)
self.redraw()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def adjVel(self, amt):
""" change velocity by amt"""
self.vel = self.vel + amt
self.redraw()
<|reserved_special_token_0|>
class ShotTracker:
""" Graphical depiction of a projectile flight using a Circle """
def __init__(self, win, angle, velocity, height):
"""win is the GraphWin to display the shot, angle, velocity, and
height are initial projectile parameters.
"""
self.proj = Projectile(angle, velocity, height)
self.marker = Circle(Point(0, height), 3)
self.marker.setFill('red')
self.marker.setOutline('red')
self.marker.draw(win)
def update(self, dt):
""" Move the shot dt seconds farther along its flight """
self.proj.update(dt)
center = self.marker.getCenter()
dx = self.proj.getX() - center.getX()
dy = self.proj.getY() - center.getY()
self.marker.move(dx, dy)
def getX(self):
""" return the current x coordinate of the shot's center """
return self.proj.getX()
def getY(self):
""" return the current y coordinate of the shot's center """
return self.proj.getY()
def undraw(self):
""" undraw the shot """
self.marker.undraw()
class ProjectileApp:
def __init__(self):
self.win = GraphWin('Projectile Animation', 640, 480)
self.win.setCoords(-10, -10, 210, 155)
Line(Point(-10, 0), Point(210, 0)).draw(self.win)
for x in range(0, 210, 50):
Text(Point(x, -7), str(x)).draw(self.win)
Line(Point(x, 0), Point(x, 2)).draw(self.win)
self.launcher = Launcher(self.win)
self.shots = []
def updateShots(self, dt):
alive = []
for shot in self.shots:
shot.update(dt)
if shot.getY() >= 0 and shot.getX() < 210:
alive.append(shot)
else:
shot.undraw()
self.shots = alive
def run(self):
while True:
self.updateShots(1 / 30)
key = self.win.checkKey()
if key in ['q', 'Q']:
break
if key == 'Up':
self.launcher.adjAngle(5)
elif key == 'Down':
self.launcher.adjAngle(-5)
elif key == 'Right':
self.launcher.adjVel(5)
elif key == 'Left':
self.launcher.adjVel(-5)
elif key == 'f':
self.shots.append(self.launcher.fire())
update(30)
self.win.close()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Launcher:
def __init__(self, win):
"""Create inital launcher with angle 45 degrees and velocity 40
win is the GraphWin to draw the launcher in.
"""
base = Circle(Point(0, 0), 3)
base.setFill('red')
base.setOutline('red')
base.draw(win)
self.win = win
self.angle = radians(45.0)
self.vel = 40.0
self.arrow = Line(Point(0, 0), Point(0, 0)).draw(win)
self.redraw()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def adjVel(self, amt):
""" change velocity by amt"""
self.vel = self.vel + amt
self.redraw()
def fire(self):
return ShotTracker(self.win, degrees(self.angle), self.vel, 0.0)
class ShotTracker:
""" Graphical depiction of a projectile flight using a Circle """
def __init__(self, win, angle, velocity, height):
"""win is the GraphWin to display the shot, angle, velocity, and
height are initial projectile parameters.
"""
self.proj = Projectile(angle, velocity, height)
self.marker = Circle(Point(0, height), 3)
self.marker.setFill('red')
self.marker.setOutline('red')
self.marker.draw(win)
def update(self, dt):
""" Move the shot dt seconds farther along its flight """
self.proj.update(dt)
center = self.marker.getCenter()
dx = self.proj.getX() - center.getX()
dy = self.proj.getY() - center.getY()
self.marker.move(dx, dy)
def getX(self):
""" return the current x coordinate of the shot's center """
return self.proj.getX()
def getY(self):
""" return the current y coordinate of the shot's center """
return self.proj.getY()
def undraw(self):
""" undraw the shot """
self.marker.undraw()
class ProjectileApp:
def __init__(self):
self.win = GraphWin('Projectile Animation', 640, 480)
self.win.setCoords(-10, -10, 210, 155)
Line(Point(-10, 0), Point(210, 0)).draw(self.win)
for x in range(0, 210, 50):
Text(Point(x, -7), str(x)).draw(self.win)
Line(Point(x, 0), Point(x, 2)).draw(self.win)
self.launcher = Launcher(self.win)
self.shots = []
def updateShots(self, dt):
alive = []
for shot in self.shots:
shot.update(dt)
if shot.getY() >= 0 and shot.getX() < 210:
alive.append(shot)
else:
shot.undraw()
self.shots = alive
def run(self):
while True:
self.updateShots(1 / 30)
key = self.win.checkKey()
if key in ['q', 'Q']:
break
if key == 'Up':
self.launcher.adjAngle(5)
elif key == 'Down':
self.launcher.adjAngle(-5)
elif key == 'Right':
self.launcher.adjVel(5)
elif key == 'Left':
self.launcher.adjVel(-5)
elif key == 'f':
self.shots.append(self.launcher.fire())
update(30)
self.win.close()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Launcher:
def __init__(self, win):
"""Create inital launcher with angle 45 degrees and velocity 40
win is the GraphWin to draw the launcher in.
"""
base = Circle(Point(0, 0), 3)
base.setFill('red')
base.setOutline('red')
base.draw(win)
self.win = win
self.angle = radians(45.0)
self.vel = 40.0
self.arrow = Line(Point(0, 0), Point(0, 0)).draw(win)
self.redraw()
def redraw(self):
"""undraw the arrow and draw a new one for the
current values of angle and velocity.
"""
self.arrow.undraw()
pt2 = Point(self.vel * cos(self.angle), self.vel * sin(self.angle))
self.arrow = Line(Point(0, 0), pt2).draw(self.win)
self.arrow.setArrow('last')
self.arrow.setWidth(3)
def adjAngle(self, amt):
""" change angle by amt degrees """
self.angle = self.angle + radians(amt)
self.redraw()
def adjVel(self, amt):
""" change velocity by amt"""
self.vel = self.vel + amt
self.redraw()
def fire(self):
return ShotTracker(self.win, degrees(self.angle), self.vel, 0.0)
class ShotTracker:
""" Graphical depiction of a projectile flight using a Circle """
def __init__(self, win, angle, velocity, height):
"""win is the GraphWin to display the shot, angle, velocity, and
height are initial projectile parameters.
"""
self.proj = Projectile(angle, velocity, height)
self.marker = Circle(Point(0, height), 3)
self.marker.setFill('red')
self.marker.setOutline('red')
self.marker.draw(win)
def update(self, dt):
""" Move the shot dt seconds farther along its flight """
self.proj.update(dt)
center = self.marker.getCenter()
dx = self.proj.getX() - center.getX()
dy = self.proj.getY() - center.getY()
self.marker.move(dx, dy)
def getX(self):
""" return the current x coordinate of the shot's center """
return self.proj.getX()
def getY(self):
""" return the current y coordinate of the shot's center """
return self.proj.getY()
def undraw(self):
""" undraw the shot """
self.marker.undraw()
class ProjectileApp:
def __init__(self):
self.win = GraphWin('Projectile Animation', 640, 480)
self.win.setCoords(-10, -10, 210, 155)
Line(Point(-10, 0), Point(210, 0)).draw(self.win)
for x in range(0, 210, 50):
Text(Point(x, -7), str(x)).draw(self.win)
Line(Point(x, 0), Point(x, 2)).draw(self.win)
self.launcher = Launcher(self.win)
self.shots = []
def updateShots(self, dt):
alive = []
for shot in self.shots:
shot.update(dt)
if shot.getY() >= 0 and shot.getX() < 210:
alive.append(shot)
else:
shot.undraw()
self.shots = alive
def run(self):
while True:
self.updateShots(1 / 30)
key = self.win.checkKey()
if key in ['q', 'Q']:
break
if key == 'Up':
self.launcher.adjAngle(5)
elif key == 'Down':
self.launcher.adjAngle(-5)
elif key == 'Right':
self.launcher.adjVel(5)
elif key == 'Left':
self.launcher.adjVel(-5)
elif key == 'f':
self.shots.append(self.launcher.fire())
update(30)
self.win.close()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
# animation2.py
# multiple-shot cannonball animation
from math import sqrt, sin, cos, radians, degrees
from graphics import *
from projectile import Projectile
from button import Button
class Launcher:
def __init__(self, win):
"""Create inital launcher with angle 45 degrees and velocity 40
win is the GraphWin to draw the launcher in.
"""
# draw the base shot of the launcher
base = Circle(Point(0,0), 3)
base.setFill("red")
base.setOutline("red")
base.draw(win)
# save the window and create initial angle and velocity
self.win = win
self.angle = radians(45.0)
self.vel = 40.0
# create inital "dummy" arrow
self.arrow = Line(Point(0,0), Point(0,0)).draw(win)
# replace it with the correct arrow
self.redraw()
def redraw(self):
"""undraw the arrow and draw a new one for the
current values of angle and velocity.
"""
self.arrow.undraw()
pt2 = Point(self.vel*cos(self.angle), self.vel*sin(self.angle))
self.arrow = Line(Point(0,0), pt2).draw(self.win)
self.arrow.setArrow("last")
self.arrow.setWidth(3)
def adjAngle(self, amt):
""" change angle by amt degrees """
self.angle = self.angle+radians(amt)
self.redraw()
def adjVel(self, amt):
""" change velocity by amt"""
self.vel = self.vel + amt
self.redraw()
def fire(self):
return ShotTracker(self.win, degrees(self.angle), self.vel, 0.0)
class ShotTracker:
""" Graphical depiction of a projectile flight using a Circle """
def __init__(self, win, angle, velocity, height):
"""win is the GraphWin to display the shot, angle, velocity, and
height are initial projectile parameters.
"""
self.proj = Projectile(angle, velocity, height)
self.marker = Circle(Point(0,height), 3)
self.marker.setFill("red")
self.marker.setOutline("red")
self.marker.draw(win)
def update(self, dt):
""" Move the shot dt seconds farther along its flight """
self.proj.update(dt)
center = self.marker.getCenter()
dx = self.proj.getX() - center.getX()
dy = self.proj.getY() - center.getY()
self.marker.move(dx,dy)
def getX(self):
""" return the current x coordinate of the shot's center """
return self.proj.getX()
def getY(self):
""" return the current y coordinate of the shot's center """
return self.proj.getY()
def undraw(self):
""" undraw the shot """
self.marker.undraw()
class ProjectileApp:
def __init__(self):
self.win = GraphWin("Projectile Animation", 640, 480)
self.win.setCoords(-10, -10, 210, 155)
Line(Point(-10,0), Point(210,0)).draw(self.win)
for x in range(0, 210, 50):
Text(Point(x,-7), str(x)).draw(self.win)
Line(Point(x,0), Point(x,2)).draw(self.win)
self.launcher = Launcher(self.win)
self.shots = []
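    # advance every live shot; undraw and drop any that hit the ground or leave the window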
def updateShots(self, dt):
alive = []
for shot in self.shots:
shot.update(dt)
if shot.getY() >= 0 and shot.getX() < 210:
alive.append(shot)
else:
shot.undraw()
self.shots = alive
def run(self):
        # main event/animation loop
while True:
self.updateShots(1/30)
key = self.win.checkKey()
if key in ["q", "Q"]:
break
if key == "Up":
self.launcher.adjAngle(5)
elif key == "Down":
self.launcher.adjAngle(-5)
elif key == "Right":
self.launcher.adjVel(5)
elif key == "Left":
self.launcher.adjVel(-5)
elif key == "f":
self.shots.append(self.launcher.fire())
update(30)
self.win.close()
if __name__ == "__main__":
ProjectileApp().run()
|
flexible
|
{
"blob_id": "09aedd6cab0b8c6a05bbee5b336fcd38aea1f7b9",
"index": 3202,
"step-1": "<mask token>\n\n\nclass Launcher:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ShotTracker:\n \"\"\" Graphical depiction of a projectile flight using a Circle \"\"\"\n\n def __init__(self, win, angle, velocity, height):\n \"\"\"win is the GraphWin to display the shot, angle, velocity, and\n height are initial projectile parameters.\n \"\"\"\n self.proj = Projectile(angle, velocity, height)\n self.marker = Circle(Point(0, height), 3)\n self.marker.setFill('red')\n self.marker.setOutline('red')\n self.marker.draw(win)\n\n def update(self, dt):\n \"\"\" Move the shot dt seconds farther along its flight \"\"\"\n self.proj.update(dt)\n center = self.marker.getCenter()\n dx = self.proj.getX() - center.getX()\n dy = self.proj.getY() - center.getY()\n self.marker.move(dx, dy)\n\n def getX(self):\n \"\"\" return the current x coordinate of the shot's center \"\"\"\n return self.proj.getX()\n\n def getY(self):\n \"\"\" return the current y coordinate of the shot's center \"\"\"\n return self.proj.getY()\n\n def undraw(self):\n \"\"\" undraw the shot \"\"\"\n self.marker.undraw()\n\n\nclass ProjectileApp:\n\n def __init__(self):\n self.win = GraphWin('Projectile Animation', 640, 480)\n self.win.setCoords(-10, -10, 210, 155)\n Line(Point(-10, 0), Point(210, 0)).draw(self.win)\n for x in range(0, 210, 50):\n Text(Point(x, -7), str(x)).draw(self.win)\n Line(Point(x, 0), Point(x, 2)).draw(self.win)\n self.launcher = Launcher(self.win)\n self.shots = []\n\n def updateShots(self, dt):\n alive = []\n for shot in self.shots:\n shot.update(dt)\n if shot.getY() >= 0 and shot.getX() < 210:\n alive.append(shot)\n else:\n shot.undraw()\n self.shots = alive\n\n def run(self):\n while True:\n self.updateShots(1 / 30)\n key = self.win.checkKey()\n if key in ['q', 'Q']:\n break\n if key == 'Up':\n self.launcher.adjAngle(5)\n elif key == 'Down':\n self.launcher.adjAngle(-5)\n elif key == 'Right':\n self.launcher.adjVel(5)\n elif key == 'Left':\n self.launcher.adjVel(-5)\n elif key == 'f':\n self.shots.append(self.launcher.fire())\n update(30)\n self.win.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Launcher:\n\n def __init__(self, win):\n \"\"\"Create inital launcher with angle 45 degrees and velocity 40\n win is the GraphWin to draw the launcher in.\n \"\"\"\n base = Circle(Point(0, 0), 3)\n base.setFill('red')\n base.setOutline('red')\n base.draw(win)\n self.win = win\n self.angle = radians(45.0)\n self.vel = 40.0\n self.arrow = Line(Point(0, 0), Point(0, 0)).draw(win)\n self.redraw()\n <mask token>\n <mask token>\n\n def adjVel(self, amt):\n \"\"\" change velocity by amt\"\"\"\n self.vel = self.vel + amt\n self.redraw()\n <mask token>\n\n\nclass ShotTracker:\n \"\"\" Graphical depiction of a projectile flight using a Circle \"\"\"\n\n def __init__(self, win, angle, velocity, height):\n \"\"\"win is the GraphWin to display the shot, angle, velocity, and\n height are initial projectile parameters.\n \"\"\"\n self.proj = Projectile(angle, velocity, height)\n self.marker = Circle(Point(0, height), 3)\n self.marker.setFill('red')\n self.marker.setOutline('red')\n self.marker.draw(win)\n\n def update(self, dt):\n \"\"\" Move the shot dt seconds farther along its flight \"\"\"\n self.proj.update(dt)\n center = self.marker.getCenter()\n dx = self.proj.getX() - center.getX()\n dy = self.proj.getY() - center.getY()\n self.marker.move(dx, dy)\n\n def getX(self):\n \"\"\" return the current x coordinate of the shot's center \"\"\"\n return self.proj.getX()\n\n def getY(self):\n \"\"\" return the current y coordinate of the shot's center \"\"\"\n return self.proj.getY()\n\n def undraw(self):\n \"\"\" undraw the shot \"\"\"\n self.marker.undraw()\n\n\nclass ProjectileApp:\n\n def __init__(self):\n self.win = GraphWin('Projectile Animation', 640, 480)\n self.win.setCoords(-10, -10, 210, 155)\n Line(Point(-10, 0), Point(210, 0)).draw(self.win)\n for x in range(0, 210, 50):\n Text(Point(x, -7), str(x)).draw(self.win)\n Line(Point(x, 0), Point(x, 2)).draw(self.win)\n self.launcher = Launcher(self.win)\n self.shots = []\n\n def updateShots(self, dt):\n alive = []\n for shot in self.shots:\n shot.update(dt)\n if shot.getY() >= 0 and shot.getX() < 210:\n alive.append(shot)\n else:\n shot.undraw()\n self.shots = alive\n\n def run(self):\n while True:\n self.updateShots(1 / 30)\n key = self.win.checkKey()\n if key in ['q', 'Q']:\n break\n if key == 'Up':\n self.launcher.adjAngle(5)\n elif key == 'Down':\n self.launcher.adjAngle(-5)\n elif key == 'Right':\n self.launcher.adjVel(5)\n elif key == 'Left':\n self.launcher.adjVel(-5)\n elif key == 'f':\n self.shots.append(self.launcher.fire())\n update(30)\n self.win.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Launcher:\n\n def __init__(self, win):\n \"\"\"Create inital launcher with angle 45 degrees and velocity 40\n win is the GraphWin to draw the launcher in.\n \"\"\"\n base = Circle(Point(0, 0), 3)\n base.setFill('red')\n base.setOutline('red')\n base.draw(win)\n self.win = win\n self.angle = radians(45.0)\n self.vel = 40.0\n self.arrow = Line(Point(0, 0), Point(0, 0)).draw(win)\n self.redraw()\n <mask token>\n <mask token>\n\n def adjVel(self, amt):\n \"\"\" change velocity by amt\"\"\"\n self.vel = self.vel + amt\n self.redraw()\n\n def fire(self):\n return ShotTracker(self.win, degrees(self.angle), self.vel, 0.0)\n\n\nclass ShotTracker:\n \"\"\" Graphical depiction of a projectile flight using a Circle \"\"\"\n\n def __init__(self, win, angle, velocity, height):\n \"\"\"win is the GraphWin to display the shot, angle, velocity, and\n height are initial projectile parameters.\n \"\"\"\n self.proj = Projectile(angle, velocity, height)\n self.marker = Circle(Point(0, height), 3)\n self.marker.setFill('red')\n self.marker.setOutline('red')\n self.marker.draw(win)\n\n def update(self, dt):\n \"\"\" Move the shot dt seconds farther along its flight \"\"\"\n self.proj.update(dt)\n center = self.marker.getCenter()\n dx = self.proj.getX() - center.getX()\n dy = self.proj.getY() - center.getY()\n self.marker.move(dx, dy)\n\n def getX(self):\n \"\"\" return the current x coordinate of the shot's center \"\"\"\n return self.proj.getX()\n\n def getY(self):\n \"\"\" return the current y coordinate of the shot's center \"\"\"\n return self.proj.getY()\n\n def undraw(self):\n \"\"\" undraw the shot \"\"\"\n self.marker.undraw()\n\n\nclass ProjectileApp:\n\n def __init__(self):\n self.win = GraphWin('Projectile Animation', 640, 480)\n self.win.setCoords(-10, -10, 210, 155)\n Line(Point(-10, 0), Point(210, 0)).draw(self.win)\n for x in range(0, 210, 50):\n Text(Point(x, -7), str(x)).draw(self.win)\n Line(Point(x, 0), Point(x, 2)).draw(self.win)\n self.launcher = Launcher(self.win)\n self.shots = []\n\n def updateShots(self, dt):\n alive = []\n for shot in self.shots:\n shot.update(dt)\n if shot.getY() >= 0 and shot.getX() < 210:\n alive.append(shot)\n else:\n shot.undraw()\n self.shots = alive\n\n def run(self):\n while True:\n self.updateShots(1 / 30)\n key = self.win.checkKey()\n if key in ['q', 'Q']:\n break\n if key == 'Up':\n self.launcher.adjAngle(5)\n elif key == 'Down':\n self.launcher.adjAngle(-5)\n elif key == 'Right':\n self.launcher.adjVel(5)\n elif key == 'Left':\n self.launcher.adjVel(-5)\n elif key == 'f':\n self.shots.append(self.launcher.fire())\n update(30)\n self.win.close()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Launcher:\n\n def __init__(self, win):\n \"\"\"Create inital launcher with angle 45 degrees and velocity 40\n win is the GraphWin to draw the launcher in.\n \"\"\"\n base = Circle(Point(0, 0), 3)\n base.setFill('red')\n base.setOutline('red')\n base.draw(win)\n self.win = win\n self.angle = radians(45.0)\n self.vel = 40.0\n self.arrow = Line(Point(0, 0), Point(0, 0)).draw(win)\n self.redraw()\n\n def redraw(self):\n \"\"\"undraw the arrow and draw a new one for the\n current values of angle and velocity.\n \"\"\"\n self.arrow.undraw()\n pt2 = Point(self.vel * cos(self.angle), self.vel * sin(self.angle))\n self.arrow = Line(Point(0, 0), pt2).draw(self.win)\n self.arrow.setArrow('last')\n self.arrow.setWidth(3)\n\n def adjAngle(self, amt):\n \"\"\" change angle by amt degrees \"\"\"\n self.angle = self.angle + radians(amt)\n self.redraw()\n\n def adjVel(self, amt):\n \"\"\" change velocity by amt\"\"\"\n self.vel = self.vel + amt\n self.redraw()\n\n def fire(self):\n return ShotTracker(self.win, degrees(self.angle), self.vel, 0.0)\n\n\nclass ShotTracker:\n \"\"\" Graphical depiction of a projectile flight using a Circle \"\"\"\n\n def __init__(self, win, angle, velocity, height):\n \"\"\"win is the GraphWin to display the shot, angle, velocity, and\n height are initial projectile parameters.\n \"\"\"\n self.proj = Projectile(angle, velocity, height)\n self.marker = Circle(Point(0, height), 3)\n self.marker.setFill('red')\n self.marker.setOutline('red')\n self.marker.draw(win)\n\n def update(self, dt):\n \"\"\" Move the shot dt seconds farther along its flight \"\"\"\n self.proj.update(dt)\n center = self.marker.getCenter()\n dx = self.proj.getX() - center.getX()\n dy = self.proj.getY() - center.getY()\n self.marker.move(dx, dy)\n\n def getX(self):\n \"\"\" return the current x coordinate of the shot's center \"\"\"\n return self.proj.getX()\n\n def getY(self):\n \"\"\" return the current y coordinate of the shot's center \"\"\"\n return self.proj.getY()\n\n def undraw(self):\n \"\"\" undraw the shot \"\"\"\n self.marker.undraw()\n\n\nclass ProjectileApp:\n\n def __init__(self):\n self.win = GraphWin('Projectile Animation', 640, 480)\n self.win.setCoords(-10, -10, 210, 155)\n Line(Point(-10, 0), Point(210, 0)).draw(self.win)\n for x in range(0, 210, 50):\n Text(Point(x, -7), str(x)).draw(self.win)\n Line(Point(x, 0), Point(x, 2)).draw(self.win)\n self.launcher = Launcher(self.win)\n self.shots = []\n\n def updateShots(self, dt):\n alive = []\n for shot in self.shots:\n shot.update(dt)\n if shot.getY() >= 0 and shot.getX() < 210:\n alive.append(shot)\n else:\n shot.undraw()\n self.shots = alive\n\n def run(self):\n while True:\n self.updateShots(1 / 30)\n key = self.win.checkKey()\n if key in ['q', 'Q']:\n break\n if key == 'Up':\n self.launcher.adjAngle(5)\n elif key == 'Down':\n self.launcher.adjAngle(-5)\n elif key == 'Right':\n self.launcher.adjVel(5)\n elif key == 'Left':\n self.launcher.adjVel(-5)\n elif key == 'f':\n self.shots.append(self.launcher.fire())\n update(30)\n self.win.close()\n\n\n<mask token>\n",
"step-5": "# animation2.py\n\n# multiple-shot cannonball animation\n\nfrom math import sqrt, sin, cos, radians, degrees\nfrom graphics import *\nfrom projectile import Projectile\nfrom button import Button\n\nclass Launcher:\n\n def __init__(self, win):\n \"\"\"Create inital launcher with angle 45 degrees and velocity 40\n win is the GraphWin to draw the launcher in.\n \"\"\"\n \n # draw the base shot of the launcher\n base = Circle(Point(0,0), 3)\n base.setFill(\"red\")\n base.setOutline(\"red\")\n base.draw(win)\n\n # save the window and create initial angle and velocity\n self.win = win\n self.angle = radians(45.0)\n self.vel = 40.0\n \n # create inital \"dummy\" arrow\n self.arrow = Line(Point(0,0), Point(0,0)).draw(win)\n # replace it with the correct arrow\n self.redraw()\n\n \n def redraw(self):\n \"\"\"undraw the arrow and draw a new one for the\n current values of angle and velocity.\n \"\"\"\n \n self.arrow.undraw()\n pt2 = Point(self.vel*cos(self.angle), self.vel*sin(self.angle))\n self.arrow = Line(Point(0,0), pt2).draw(self.win)\n self.arrow.setArrow(\"last\")\n self.arrow.setWidth(3)\n\n \n def adjAngle(self, amt):\n \"\"\" change angle by amt degrees \"\"\"\n \n self.angle = self.angle+radians(amt)\n self.redraw()\n\n \n def adjVel(self, amt):\n \"\"\" change velocity by amt\"\"\"\n \n self.vel = self.vel + amt\n self.redraw()\n\n def fire(self):\n return ShotTracker(self.win, degrees(self.angle), self.vel, 0.0)\n \n\nclass ShotTracker:\n\n \"\"\" Graphical depiction of a projectile flight using a Circle \"\"\"\n\n def __init__(self, win, angle, velocity, height):\n \"\"\"win is the GraphWin to display the shot, angle, velocity, and\n height are initial projectile parameters.\n \"\"\"\n \n self.proj = Projectile(angle, velocity, height)\n self.marker = Circle(Point(0,height), 3)\n self.marker.setFill(\"red\")\n self.marker.setOutline(\"red\")\n self.marker.draw(win)\n\n \n def update(self, dt):\n \"\"\" Move the shot dt seconds farther along its flight \"\"\"\n \n self.proj.update(dt)\n center = self.marker.getCenter()\n dx = self.proj.getX() - center.getX()\n dy = self.proj.getY() - center.getY()\n self.marker.move(dx,dy)\n\n \n def getX(self):\n \"\"\" return the current x coordinate of the shot's center \"\"\"\n return self.proj.getX()\n\n def getY(self):\n \"\"\" return the current y coordinate of the shot's center \"\"\"\n return self.proj.getY()\n\n def undraw(self):\n \"\"\" undraw the shot \"\"\"\n self.marker.undraw()\n\n\nclass ProjectileApp:\n\n def __init__(self):\n self.win = GraphWin(\"Projectile Animation\", 640, 480)\n self.win.setCoords(-10, -10, 210, 155)\n Line(Point(-10,0), Point(210,0)).draw(self.win)\n for x in range(0, 210, 50):\n Text(Point(x,-7), str(x)).draw(self.win)\n Line(Point(x,0), Point(x,2)).draw(self.win)\n\n self.launcher = Launcher(self.win)\n self.shots = []\n\n def updateShots(self, dt):\n alive = []\n for shot in self.shots:\n shot.update(dt)\n if shot.getY() >= 0 and shot.getX() < 210:\n alive.append(shot)\n else:\n shot.undraw()\n self.shots = alive\n\n def run(self):\n \n # main event/animation lopp\n while True:\n self.updateShots(1/30)\n \n key = self.win.checkKey()\n if key in [\"q\", \"Q\"]:\n break\n\n if key == \"Up\":\n self.launcher.adjAngle(5)\n elif key == \"Down\":\n self.launcher.adjAngle(-5)\n elif key == \"Right\":\n self.launcher.adjVel(5)\n elif key == \"Left\":\n self.launcher.adjVel(-5)\n elif key == \"f\":\n self.shots.append(self.launcher.fire())\n \n update(30)\n \n self.win.close()\n \n\nif __name__ == 
\"__main__\":\n ProjectileApp().run()\n",
"step-ids": [
12,
14,
15,
17,
20
]
}
|
[
12,
14,
15,
17,
20
] |
import numpy as np
import cv2
import datetime
import random
# from random import randint
import time
import logging
def GetDateTimeString():
dt = str(datetime.datetime.now()).split(".")[0]
clean = dt.replace(" ","_").replace(":","_")
return clean
def GetBackground(bgNumber):
# bgImage = './backgrounds/' + str(new_img_nums[bgNumber]) + '.jpg'
bgImage = '/home/pi/pibooth/backgrounds/space.jpg'
return cv2.imread(bgImage)
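# Grab one webcam frame and composite it over bg: pixels inside the key-color
# range are replaced by the background image, everything else keeps the camera image.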
def GetImage(bg):
ret, frame = cam.read()
sensitivity = 1 # play with sensitivity to get rid of noise...
lowerRange = np.array([0, 0, 255 - sensitivity]) # this is currently set to white
upperRange = np.array([255, sensitivity, 255]) # this is currently set to white
    # Mask out the backdrop (the HSV ranges above key on white, not green)
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
image_mask = cv2.inRange(hsv, lowerRange, upperRange)
bg_mask = cv2.bitwise_and(bg, bg, mask = image_mask)
fg_mask = cv2.bitwise_and(frame, frame, mask = cv2.bitwise_not(image_mask))
img = cv2.add(bg_mask, fg_mask)
return img
# Set up window for full screen
cv2.namedWindow("Photobooth", cv2.WND_PROP_FULLSCREEN)
# cv2.setWindowProperty("Photobooth", cv2.WND_PROP_FULLSCREEN, 1)
# options for countdown timer
fontFace = cv2.FONT_HERSHEY_SIMPLEX
fontScale = 1
thickness = 4
countdownSeconds = 5
displayPhotoSeconds = 5
# Set up WebCam
width = 640
height = 480
cam = cv2.VideoCapture(0)
cam.set(3,width)  # property 3 == CAP_PROP_FRAME_WIDTH
cam.set(4,height) # property 4 == CAP_PROP_FRAME_HEIGHT
bgNumber = 0
new_img_nums = random.sample(range(1,9), 4)
bg = GetBackground(bgNumber)
clicked = False
clickedTime = {}
while(True):
img = GetImage(bg) #get masked image from webcam
key = cv2.waitKey(1) #check for keypress
if clicked == True : # if countdown timer started
elapsed = datetime.datetime.now() - clickedTime
secs = int(elapsed.total_seconds())
        if secs > countdownSeconds : # once the countdown has elapsed, save the current image
clicked = False
cv2.imwrite('/home/pi/pibooth/newImages/img_' + GetDateTimeString() + '.jpg',img)
# cv2.imwrite('./newImages/img_' + GetDateTimeString() + '.jpg',img)
cv2.imshow('Photobooth',img)
            time.sleep(displayPhotoSeconds) # show the photo for displayPhotoSeconds seconds
bgNumber += 1
bg = GetBackground(bgNumber) # get a new background
else : # show the countdown timer
            if secs == countdownSeconds: # final tick of the countdown
                text = 'Say cheese!'
            else:
                text = str(countdownSeconds - secs) + "..."
textSize, base = cv2.getTextSize(text, fontFace, fontScale, thickness)
textWidth = int((width - textSize[0]) / 2)
textHeight = int((height + textSize[1]) / 2)
cv2.putText(img, text, (textWidth, textHeight), fontFace, fontScale, (255, 255, 255), thickness)
elif key == 32 : # on spacebar pressed, start the countdown timer
clickedTime = datetime.datetime.now()
clicked = True
elif key == 27 : # on escape, close the program
break
elif bgNumber == 4:
# assemble photos into strip
# print strip
# reset app
break
cv2.imshow('Photobooth',img) #display masked image
cv2.destroyAllWindows()
cam.release()
|
normal
|
{
"blob_id": "a14c23398bbf42832a285d29c1b80aefc5fdaf6c",
"index": 9031,
"step-1": "<mask token>\n\n\ndef GetDateTimeString():\n dt = str(datetime.datetime.now()).split('.')[0]\n clean = dt.replace(' ', '_').replace(':', '_')\n return clean\n\n\ndef GetBackground(bgNumber):\n bgImage = '/home/pi/pibooth/backgrounds/space.jpg'\n return cv2.imread(bgImage)\n\n\ndef GetImage(bg):\n ret, frame = cam.read()\n sensitivity = 1\n lowerRange = np.array([0, 0, 255 - sensitivity])\n upperRange = np.array([255, sensitivity, 255])\n hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)\n image_mask = cv2.inRange(hsv, lowerRange, upperRange)\n bg_mask = cv2.bitwise_and(bg, bg, mask=image_mask)\n fg_mask = cv2.bitwise_and(frame, frame, mask=cv2.bitwise_not(image_mask))\n img = cv2.add(bg_mask, fg_mask)\n return img\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef GetDateTimeString():\n dt = str(datetime.datetime.now()).split('.')[0]\n clean = dt.replace(' ', '_').replace(':', '_')\n return clean\n\n\ndef GetBackground(bgNumber):\n bgImage = '/home/pi/pibooth/backgrounds/space.jpg'\n return cv2.imread(bgImage)\n\n\ndef GetImage(bg):\n ret, frame = cam.read()\n sensitivity = 1\n lowerRange = np.array([0, 0, 255 - sensitivity])\n upperRange = np.array([255, sensitivity, 255])\n hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)\n image_mask = cv2.inRange(hsv, lowerRange, upperRange)\n bg_mask = cv2.bitwise_and(bg, bg, mask=image_mask)\n fg_mask = cv2.bitwise_and(frame, frame, mask=cv2.bitwise_not(image_mask))\n img = cv2.add(bg_mask, fg_mask)\n return img\n\n\ncv2.namedWindow('Photobooth', cv2.WND_PROP_FULLSCREEN)\n<mask token>\ncam.set(3, width)\ncam.set(4, height)\n<mask token>\nwhile True:\n img = GetImage(bg)\n key = cv2.waitKey(1)\n if clicked == True:\n elapsed = datetime.datetime.now() - clickedTime\n secs = int(elapsed.total_seconds())\n if secs > countdownSeconds:\n clicked = False\n cv2.imwrite('/home/pi/pibooth/newImages/img_' +\n GetDateTimeString() + '.jpg', img)\n cv2.imshow('Photobooth', img)\n time.sleep(displayPhotoSeconds)\n bgNumber += 1\n bg = GetBackground(bgNumber)\n else:\n if secs - 5 == 1:\n text = 'Say cheese!'\n else:\n text = str(5 - secs) + '...'\n textSize, base = cv2.getTextSize(text, fontFace, fontScale,\n thickness)\n textWidth = int((width - textSize[0]) / 2)\n textHeight = int((height + textSize[1]) / 2)\n cv2.putText(img, text, (textWidth, textHeight), fontFace,\n fontScale, (255, 255, 255), thickness)\n elif key == 32:\n clickedTime = datetime.datetime.now()\n clicked = True\n elif key == 27:\n break\n elif bgNumber == 4:\n break\n cv2.imshow('Photobooth', img)\ncv2.destroyAllWindows()\ncam.release()\n",
"step-3": "<mask token>\n\n\ndef GetDateTimeString():\n dt = str(datetime.datetime.now()).split('.')[0]\n clean = dt.replace(' ', '_').replace(':', '_')\n return clean\n\n\ndef GetBackground(bgNumber):\n bgImage = '/home/pi/pibooth/backgrounds/space.jpg'\n return cv2.imread(bgImage)\n\n\ndef GetImage(bg):\n ret, frame = cam.read()\n sensitivity = 1\n lowerRange = np.array([0, 0, 255 - sensitivity])\n upperRange = np.array([255, sensitivity, 255])\n hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)\n image_mask = cv2.inRange(hsv, lowerRange, upperRange)\n bg_mask = cv2.bitwise_and(bg, bg, mask=image_mask)\n fg_mask = cv2.bitwise_and(frame, frame, mask=cv2.bitwise_not(image_mask))\n img = cv2.add(bg_mask, fg_mask)\n return img\n\n\ncv2.namedWindow('Photobooth', cv2.WND_PROP_FULLSCREEN)\nfontFace = cv2.FONT_HERSHEY_SIMPLEX\nfontScale = 1\nthickness = 4\ncountdownSeconds = 5\ndisplayPhotoSeconds = 5\nwidth = 640\nheight = 480\ncam = cv2.VideoCapture(0)\ncam.set(3, width)\ncam.set(4, height)\nbgNumber = 0\nnew_img_nums = random.sample(range(1, 9), 4)\nbg = GetBackground(bgNumber)\nclicked = False\nclickedTime = {}\nwhile True:\n img = GetImage(bg)\n key = cv2.waitKey(1)\n if clicked == True:\n elapsed = datetime.datetime.now() - clickedTime\n secs = int(elapsed.total_seconds())\n if secs > countdownSeconds:\n clicked = False\n cv2.imwrite('/home/pi/pibooth/newImages/img_' +\n GetDateTimeString() + '.jpg', img)\n cv2.imshow('Photobooth', img)\n time.sleep(displayPhotoSeconds)\n bgNumber += 1\n bg = GetBackground(bgNumber)\n else:\n if secs - 5 == 1:\n text = 'Say cheese!'\n else:\n text = str(5 - secs) + '...'\n textSize, base = cv2.getTextSize(text, fontFace, fontScale,\n thickness)\n textWidth = int((width - textSize[0]) / 2)\n textHeight = int((height + textSize[1]) / 2)\n cv2.putText(img, text, (textWidth, textHeight), fontFace,\n fontScale, (255, 255, 255), thickness)\n elif key == 32:\n clickedTime = datetime.datetime.now()\n clicked = True\n elif key == 27:\n break\n elif bgNumber == 4:\n break\n cv2.imshow('Photobooth', img)\ncv2.destroyAllWindows()\ncam.release()\n",
"step-4": "import numpy as np\nimport cv2\nimport datetime\nimport random\nimport time\nimport logging\n\n\ndef GetDateTimeString():\n dt = str(datetime.datetime.now()).split('.')[0]\n clean = dt.replace(' ', '_').replace(':', '_')\n return clean\n\n\ndef GetBackground(bgNumber):\n bgImage = '/home/pi/pibooth/backgrounds/space.jpg'\n return cv2.imread(bgImage)\n\n\ndef GetImage(bg):\n ret, frame = cam.read()\n sensitivity = 1\n lowerRange = np.array([0, 0, 255 - sensitivity])\n upperRange = np.array([255, sensitivity, 255])\n hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)\n image_mask = cv2.inRange(hsv, lowerRange, upperRange)\n bg_mask = cv2.bitwise_and(bg, bg, mask=image_mask)\n fg_mask = cv2.bitwise_and(frame, frame, mask=cv2.bitwise_not(image_mask))\n img = cv2.add(bg_mask, fg_mask)\n return img\n\n\ncv2.namedWindow('Photobooth', cv2.WND_PROP_FULLSCREEN)\nfontFace = cv2.FONT_HERSHEY_SIMPLEX\nfontScale = 1\nthickness = 4\ncountdownSeconds = 5\ndisplayPhotoSeconds = 5\nwidth = 640\nheight = 480\ncam = cv2.VideoCapture(0)\ncam.set(3, width)\ncam.set(4, height)\nbgNumber = 0\nnew_img_nums = random.sample(range(1, 9), 4)\nbg = GetBackground(bgNumber)\nclicked = False\nclickedTime = {}\nwhile True:\n img = GetImage(bg)\n key = cv2.waitKey(1)\n if clicked == True:\n elapsed = datetime.datetime.now() - clickedTime\n secs = int(elapsed.total_seconds())\n if secs > countdownSeconds:\n clicked = False\n cv2.imwrite('/home/pi/pibooth/newImages/img_' +\n GetDateTimeString() + '.jpg', img)\n cv2.imshow('Photobooth', img)\n time.sleep(displayPhotoSeconds)\n bgNumber += 1\n bg = GetBackground(bgNumber)\n else:\n if secs - 5 == 1:\n text = 'Say cheese!'\n else:\n text = str(5 - secs) + '...'\n textSize, base = cv2.getTextSize(text, fontFace, fontScale,\n thickness)\n textWidth = int((width - textSize[0]) / 2)\n textHeight = int((height + textSize[1]) / 2)\n cv2.putText(img, text, (textWidth, textHeight), fontFace,\n fontScale, (255, 255, 255), thickness)\n elif key == 32:\n clickedTime = datetime.datetime.now()\n clicked = True\n elif key == 27:\n break\n elif bgNumber == 4:\n break\n cv2.imshow('Photobooth', img)\ncv2.destroyAllWindows()\ncam.release()\n",
"step-5": "import numpy as np\nimport cv2\nimport datetime\nimport random\n# from random import randint\nimport time\nimport logging\n\ndef GetDateTimeString():\n dt = str(datetime.datetime.now()).split(\".\")[0]\n clean = dt.replace(\" \",\"_\").replace(\":\",\"_\")\n return clean\n\ndef GetBackground(bgNumber):\n # bgImage = './backgrounds/' + str(new_img_nums[bgNumber]) + '.jpg'\n bgImage = '/home/pi/pibooth/backgrounds/space.jpg'\n return cv2.imread(bgImage)\n\ndef GetImage(bg):\n ret, frame = cam.read()\n\n sensitivity = 1 # play with sensitivity to get rid of noise...\n lowerRange = np.array([0, 0, 255 - sensitivity]) # this is currently set to white\n upperRange = np.array([255, sensitivity, 255]) # this is currently set to white\n\n #Mask the green screen\n hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)\n image_mask = cv2.inRange(hsv, lowerRange, upperRange)\n bg_mask = cv2.bitwise_and(bg, bg, mask = image_mask)\n fg_mask = cv2.bitwise_and(frame, frame, mask = cv2.bitwise_not(image_mask))\n img = cv2.add(bg_mask, fg_mask)\n\n return img\n\n# Set up window for full screen\ncv2.namedWindow(\"Photobooth\", cv2.WND_PROP_FULLSCREEN)\n# cv2.setWindowProperty(\"Photobooth\", cv2.WND_PROP_FULLSCREEN, 1)\n\n# options for countdown timer\nfontFace = cv2.FONT_HERSHEY_SIMPLEX\nfontScale = 1\nthickness = 4\ncountdownSeconds = 5\ndisplayPhotoSeconds = 5\n\n# Set up WebCam\nwidth = 640\nheight = 480\n\ncam = cv2.VideoCapture(0)\ncam.set(3,width)\ncam.set(4,height)\n\nbgNumber = 0\nnew_img_nums = random.sample(range(1,9), 4)\n\nbg = GetBackground(bgNumber)\nclicked = False\nclickedTime = {}\n\nwhile(True):\n img = GetImage(bg) #get masked image from webcam\n\n key = cv2.waitKey(1) #check for keypress\n if clicked == True : # if countdown timer started\n elapsed = datetime.datetime.now() - clickedTime\n secs = int(elapsed.total_seconds())\n if secs > countdownSeconds : # if five seconds are up, save the current image\n clicked = False\n cv2.imwrite('/home/pi/pibooth/newImages/img_' + GetDateTimeString() + '.jpg',img)\n # cv2.imwrite('./newImages/img_' + GetDateTimeString() + '.jpg',img)\n cv2.imshow('Photobooth',img)\n time.sleep(displayPhotoSeconds) # show the photo for 5 seconds\n bgNumber += 1\n bg = GetBackground(bgNumber) # get a new background\n else : # show the countdown timer\n if secs - 5 == 1:\n text = 'Say cheese!'\n else:\n text = str(5 - secs) + \"...\"\n textSize, base = cv2.getTextSize(text, fontFace, fontScale, thickness)\n textWidth = int((width - textSize[0]) / 2)\n textHeight = int((height + textSize[1]) / 2)\n cv2.putText(img, text, (textWidth, textHeight), fontFace, fontScale, (255, 255, 255), thickness)\n elif key == 32 : # on spacebar pressed, start the countdown timer\n clickedTime = datetime.datetime.now()\n clicked = True\n elif key == 27 : # on escape, close the program\n break\n elif bgNumber == 4:\n # assemble photos into strip\n # print strip\n # reset app\n break\n\n cv2.imshow('Photobooth',img) #display masked image\n\ncv2.destroyAllWindows()\ncam.release()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
#!/usr/bin/env python
import ROOT
ROOT.gROOT.SetBatch()
ROOT.gROOT.ProcessLine('gErrorIgnoreLevel = kError;')
import os
import time
import varial.tools
import varial.generators as gen
import itertools
from varial.sample import Sample
import varial.analysis as analysis
# import varial.toolinterface
dirname = 'VLQToHiggsPairProd'
varial.settings.rootfile_postfixes = ['.png','.pdf']
varial.settings.git_tag = varial.settings.readgittag('./GITTAGGER_LOG.txt')
current_tag = varial.settings.git_tag
# sample definitions
smpls = list()
smpls.append(Sample(
name='QCD',
legend='QCD'
))
smpls.append(Sample(
name='TTJets',
legend='TTJets'
))
smpls.append(Sample(
name='WJets',
legend='WJets'
))
smpls.append(Sample(
name='ZJets',
legend='ZJets'
))
analysis.all_samples = dict((s.name, s) for s in smpls)
varial.settings.defaults_Legend['x_pos'] = 0.80
varial.settings.defaults_Legend['label_width'] = 0.36
varial.settings.defaults_Legend['label_height'] = 0.03
# varial.settings.debug_mode = True
varial.settings.box_text_size = 0.03
varial.settings.colors = {
'TTJets': 632,
'WJets': 878,
'ZJets': 596,
'TpTp_M1000': 870,
# 'TpJ_TH_M800_NonTlep': 434,
}
# SELECT HISTOGRAMS TO PLOT HERE!
# use these functions to specifically select histograms for plotting
current_cuts = ['AfterPresel', 'FullSelection'] # 'Nminus1-MuonPtCut', 'OneCut-HTCut', 'FullSelection', 'Nminus1-6OneHiggsTagCut'
current_hists = ['/EventHists', '/MuonHists'] # "/ElectronHists", '/MuonHists', '/JetHists', '/TopJetHists', '/EventHists', '/GenHists/w_decay_lin', '/GenHists/w_decay_log'
use_cuts = False
use_histos = False
varial.settings.stacking_order = ['ZJets', 'WJets', 'TTJets']
def select_histograms(wrp):
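    # keep a histogram only if it passes the optional cut/histogram whitelists (toggled by use_cuts / use_histos)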
use_this = True
if use_cuts and all('NoGenSel-'+c not in wrp.in_file_path for c in current_cuts):
use_this = False
if wrp.name.startswith('cf_'):
use_this = False
if use_histos and all(c not in wrp.in_file_path for c in current_hists):
use_this = False
# if ('GenHists' in wrp.in_file_path and not (wrp.name.startswith('mu_') or wrp.name.startswith('genjet_'))):
# use_this = False
# if 'GenHists' in wrp.in_file_path and ('NoCuts' not in wrp.in_file_path and 'Nminus1-BTagCut' not in wrp.in_file_path):
# use_this = False
return use_this
def select_splithistograms(wrp):
use_this = True
if use_cuts and all('NoGenSel-'+c not in wrp.in_file_path for c in current_cuts):
use_this = False
if wrp.name.startswith('cf_'):
use_this = False
if use_histos and all(c not in wrp.in_file_path for c in current_hists):
use_this = False
# if ('GenHists' in wrp.in_file_path and not (wrp.name.startswith('mu_') or wrp.name.startswith('genjet_'))):
# use_this = False
# if 'GenHists' in wrp.in_file_path and ('NoCuts' not in wrp.in_file_path and 'Nminus1-BTagCut' not in wrp.in_file_path):
# use_this = False
return use_this
# SOME FUNCTIONS TO MANIPULATE HISTOGRAMS
def norm_to_first_bin(wrp):
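    # scale the histogram (and its lumi info) so the first bin equals 1; used to normalise cutflow plots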
histo = wrp.histo.Clone()
firstbin = histo.GetBinContent(1)
histo.Scale(1. / firstbin)
info = wrp.all_info()
info["lumi"] /= firstbin
return varial.wrappers.HistoWrapper(histo, **info)
def norm_histos_to_first_bin(wrps):
for wrp in wrps:
if isinstance(wrp, varial.wrappers.HistoWrapper):
yield norm_to_first_bin(wrp)
else:
yield wrp
def norm_histos_to_integral(wrps):
for wrp in wrps:
if isinstance(wrp, varial.wrappers.HistoWrapper):
yield varial.operations.norm_to_integral(wrp)
else:
yield wrp
def label_axes(wrps):
for w in wrps:
if 'TH1' in w.type and w.histo.GetXaxis().GetTitle() == '':
w.histo.GetXaxis().SetTitle(w.histo.GetTitle())
w.histo.GetYaxis().SetTitle('events')
w.histo.SetTitle('')
yield w
def norm_cf_plots(wrps):
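    # currently unused: the call in for_stacked_hook below is commented out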
for w in wrps:
if w.name.startswith('cf_') and isinstance(w, varial.wrappers.HistoWrapper):
yield varial.operations.norm_to_integral(w)
else:
yield w
# HOOK FUNCTIONS FOR PLOTTER_FACTORIES; manipulate histograms here
def for_stacked_hook(wrps):
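    # drop empty histograms, attach sample/legend/is_signal metadata parsed from the file path, then label axes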
# wrps = norm_cf_plots(wrps)
wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)
wrps = gen.gen_add_wrp_info(
wrps,
sample=lambda w: w.file_path.split('.')[-2],
analyzer=lambda w: w.in_file_path[0],
legend=lambda w: w.sample,
is_signal=lambda w: 'TpTp_M' in w.sample,
lumi=lambda w: 1.
)
# wrps = gen.imap_conditional(wrps, lambda w: 'TpJ_TH_M800' in w.sample, gen.op.norm_to_lumi)
wrps = label_axes(wrps)
return wrps
def norm_cf_hook(wrps):
wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)
wrps = norm_histos_to_first_bin(wrps)
wrps = label_axes(wrps)
return wrps
def do_nothing_hook(wrps):
wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)
wrps = label_axes(wrps)
return wrps
def for_eff_plots_hook(wrps):
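    # like for_stacked_hook, but signal samples get lumi=0.01 (drawn scaled up, legend prefixed '100* ') and efficiency graphs are added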
wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)
wrps = gen.gen_add_wrp_info(
wrps,
sample=lambda w: w.file_path.split('.')[-2],
analyzer=lambda w: w.in_file_path[0],
legend=lambda w: ('100* ' if 'TpTp_M' in w.sample else '') + w.sample,
is_signal=lambda w: 'TpTp_M' in w.sample,
lumi=lambda w: 0.01 if 'TpTp_M' in w.sample else 1.
)
wrps = gen.gen_make_eff_graphs(wrps)
wrps = label_axes(wrps)
return wrps
# def calc_stack_order(wrps):
# for w in wrps:
# def stack_by_max(wrps):
# wrps = calc_stack_order(wrps)
# wrps = gen.mc_stack_n_data_sum(wrps)
# return wrps
# PLOTTER FACTORIES; select here, in general, which histograms to plot, how to manipulate them, and so on.
def stack_histos_factory(**kws):
kws['filter_keyfunc'] = lambda w: 'TH1' in w.type
kws['hook_loaded_histos'] = for_stacked_hook
kws['plot_setup'] = gen.mc_stack_n_data_sum
kws['save_lin_log_scale'] = True
# kws['save_log_scale'] = True
# kws['hook_canvas_pre_build'] = canvas_hook
# kws['hook_canvas_post_build'] = canvas_hook
return varial.tools.Plotter(**kws)
def norm_cf_factory(**kws):
# kws['filter_keyfunc'] = lambda w: 'TH1' in w.type
kws['hook_loaded_histos'] = norm_cf_hook
kws['save_lin_log_scale'] = True
kws['save_name_func'] = lambda w : w.name + '_norm'
# kws['save_log_scale'] = True
# kws['hook_canvas_pre_build'] = canvas_hook
# kws['hook_canvas_post_build'] = canvas_hook
return varial.tools.Plotter(**kws)
def do_nothing_factory(**kws):
# kws['filter_keyfunc'] = lambda w: 'TH1' in w.type
kws['hook_loaded_histos'] = do_nothing_hook
kws['save_lin_log_scale'] = True
# kws['save_log_scale'] = True
# kws['hook_canvas_pre_build'] = canvas_hook
# kws['hook_canvas_post_build'] = canvas_hook
return varial.tools.Plotter(**kws)
def for_eff_factory(**kws):
kws['filter_keyfunc'] = lambda w: 'TH1' in w.type
kws['hook_loaded_histos'] = for_eff_plots_hook
kws['save_lin_log_scale'] = True
# kws['save_log_scale'] = True
# kws['hook_canvas_pre_build'] = canvas_hook
# kws['hook_canvas_post_build'] = canvas_hook
return varial.tools.Plotter(**kws)
def create_name(name):
return name+'v'+varial.settings.git_tag
tagger = varial.tools.GitTagger('./GITTAGGER_LOG.txt')
tagger.run()
p1 = varial.tools.mk_rootfile_plotter(
name=create_name(dirname),
# filter_keyfunc=lambda w: not w.name.startswith('cf_'),
filter_keyfunc=select_histograms,
plotter_factory=stack_histos_factory,
combine_files=True
)
p2 = varial.tools.mk_rootfile_plotter(
name=create_name(dirname),
filter_keyfunc=lambda w: w.name.startswith('cf_') and not w.name.endswith('raw'),
plotter_factory=norm_cf_factory,
combine_files=True
)
p3 = varial.tools.mk_rootfile_plotter(
name=create_name(dirname),
filter_keyfunc=lambda w: w.name.startswith('cf_') and not w.name.endswith('raw'),
plotter_factory=do_nothing_factory,
combine_files=True
)
p4 = varial.tools.mk_rootfile_plotter(
name=create_name(dirname)+'split',
pattern='v1.19_unmerged_files/*.root',
filter_keyfunc=select_splithistograms,
plotter_factory=for_eff_factory,
combine_files=False
)
p5 = varial.tools.mk_rootfile_plotter(
name=create_name(dirname)+'split',
# filter_keyfunc=lambda w: not w.name.startswith('cf_'),
filter_keyfunc=select_splithistograms,
plotter_factory=for_eff_factory,
combine_files=True
)
p6 = varial.tools.mk_rootfile_plotter(
name=create_name(dirname)+'split',
pattern='v1.19_unmerged_files/*.root',
filter_keyfunc=lambda w: w.name.startswith('cf_') and not w.name.endswith('raw'),
plotter_factory=norm_cf_factory,
combine_files=False
)
p7 = varial.tools.mk_rootfile_plotter(
name=create_name(dirname)+'split',
pattern='v1.19_unmerged_files/*.root',
filter_keyfunc=lambda w: w.name.startswith('cf_') and not w.name.endswith('raw'),
plotter_factory=do_nothing_factory,
combine_files=False
)
time.sleep(1)
p1.run()
p2.run()
p3.run()
# p4.run()
p5.run()
# p6.run()
# p7.run()
varial.tools.WebCreator().run()
# os.system('rm -r ~/www/TprimeAnalysis/%s' % create_name(dirname))
# os.system('cp -r %s ~/www/TprimeAnalysis/' % create_name(dirname))
|
normal
|
{
"blob_id": "05ced056bf2f59f85bef82e53803e7df7ff8c8df",
"index": 1156,
"step-1": "<mask token>\n\n\ndef select_histograms(wrp):\n use_this = True\n if use_cuts and all('NoGenSel-' + c not in wrp.in_file_path for c in\n current_cuts):\n use_this = False\n if wrp.name.startswith('cf_'):\n use_this = False\n if use_histos and all(c not in wrp.in_file_path for c in current_hists):\n use_this = False\n return use_this\n\n\n<mask token>\n\n\ndef norm_to_first_bin(wrp):\n histo = wrp.histo.Clone()\n firstbin = histo.GetBinContent(1)\n histo.Scale(1.0 / firstbin)\n info = wrp.all_info()\n info['lumi'] /= firstbin\n return varial.wrappers.HistoWrapper(histo, **info)\n\n\n<mask token>\n\n\ndef norm_histos_to_integral(wrps):\n for wrp in wrps:\n if isinstance(wrp, varial.wrappers.HistoWrapper):\n yield varial.operations.norm_to_integral(wrp)\n else:\n yield wrp\n\n\ndef label_axes(wrps):\n for w in wrps:\n if 'TH1' in w.type and w.histo.GetXaxis().GetTitle() == '':\n w.histo.GetXaxis().SetTitle(w.histo.GetTitle())\n w.histo.GetYaxis().SetTitle('events')\n w.histo.SetTitle('')\n yield w\n\n\n<mask token>\n\n\ndef for_stacked_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = gen.gen_add_wrp_info(wrps, sample=lambda w: w.file_path.split(\n '.')[-2], analyzer=lambda w: w.in_file_path[0], legend=lambda w: w.\n sample, is_signal=lambda w: 'TpTp_M' in w.sample, lumi=lambda w: 1.0)\n wrps = label_axes(wrps)\n return wrps\n\n\n<mask token>\n\n\ndef do_nothing_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = label_axes(wrps)\n return wrps\n\n\n<mask token>\n\n\ndef norm_cf_factory(**kws):\n kws['hook_loaded_histos'] = norm_cf_hook\n kws['save_lin_log_scale'] = True\n kws['save_name_func'] = lambda w: w.name + '_norm'\n return varial.tools.Plotter(**kws)\n\n\ndef do_nothing_factory(**kws):\n kws['hook_loaded_histos'] = do_nothing_hook\n kws['save_lin_log_scale'] = True\n return varial.tools.Plotter(**kws)\n\n\ndef for_eff_factory(**kws):\n kws['filter_keyfunc'] = lambda w: 'TH1' in w.type\n kws['hook_loaded_histos'] = for_eff_plots_hook\n kws['save_lin_log_scale'] = True\n return varial.tools.Plotter(**kws)\n\n\ndef create_name(name):\n return name + 'v' + varial.settings.git_tag\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef select_histograms(wrp):\n use_this = True\n if use_cuts and all('NoGenSel-' + c not in wrp.in_file_path for c in\n current_cuts):\n use_this = False\n if wrp.name.startswith('cf_'):\n use_this = False\n if use_histos and all(c not in wrp.in_file_path for c in current_hists):\n use_this = False\n return use_this\n\n\ndef select_splithistograms(wrp):\n use_this = True\n if use_cuts and all('NoGenSel-' + c not in wrp.in_file_path for c in\n current_cuts):\n use_this = False\n if wrp.name.startswith('cf_'):\n use_this = False\n if use_histos and all(c not in wrp.in_file_path for c in current_hists):\n use_this = False\n return use_this\n\n\ndef norm_to_first_bin(wrp):\n histo = wrp.histo.Clone()\n firstbin = histo.GetBinContent(1)\n histo.Scale(1.0 / firstbin)\n info = wrp.all_info()\n info['lumi'] /= firstbin\n return varial.wrappers.HistoWrapper(histo, **info)\n\n\n<mask token>\n\n\ndef norm_histos_to_integral(wrps):\n for wrp in wrps:\n if isinstance(wrp, varial.wrappers.HistoWrapper):\n yield varial.operations.norm_to_integral(wrp)\n else:\n yield wrp\n\n\ndef label_axes(wrps):\n for w in wrps:\n if 'TH1' in w.type and w.histo.GetXaxis().GetTitle() == '':\n w.histo.GetXaxis().SetTitle(w.histo.GetTitle())\n w.histo.GetYaxis().SetTitle('events')\n w.histo.SetTitle('')\n yield w\n\n\ndef norm_cf_plots(wrps):\n for w in wrps:\n if w.name.startswith('cf_') and isinstance(w, varial.wrappers.\n HistoWrapper):\n yield varial.operations.norm_to_integral(w)\n else:\n yield w\n\n\ndef for_stacked_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = gen.gen_add_wrp_info(wrps, sample=lambda w: w.file_path.split(\n '.')[-2], analyzer=lambda w: w.in_file_path[0], legend=lambda w: w.\n sample, is_signal=lambda w: 'TpTp_M' in w.sample, lumi=lambda w: 1.0)\n wrps = label_axes(wrps)\n return wrps\n\n\n<mask token>\n\n\ndef do_nothing_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = label_axes(wrps)\n return wrps\n\n\ndef for_eff_plots_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = gen.gen_add_wrp_info(wrps, sample=lambda w: w.file_path.split(\n '.')[-2], analyzer=lambda w: w.in_file_path[0], legend=lambda w: (\n '100* ' if 'TpTp_M' in w.sample else '') + w.sample, is_signal=lambda\n w: 'TpTp_M' in w.sample, lumi=lambda w: 0.01 if 'TpTp_M' in w.\n sample else 1.0)\n wrps = gen.gen_make_eff_graphs(wrps)\n wrps = label_axes(wrps)\n return wrps\n\n\n<mask token>\n\n\ndef norm_cf_factory(**kws):\n kws['hook_loaded_histos'] = norm_cf_hook\n kws['save_lin_log_scale'] = True\n kws['save_name_func'] = lambda w: w.name + '_norm'\n return varial.tools.Plotter(**kws)\n\n\ndef do_nothing_factory(**kws):\n kws['hook_loaded_histos'] = do_nothing_hook\n kws['save_lin_log_scale'] = True\n return varial.tools.Plotter(**kws)\n\n\ndef for_eff_factory(**kws):\n kws['filter_keyfunc'] = lambda w: 'TH1' in w.type\n kws['hook_loaded_histos'] = for_eff_plots_hook\n kws['save_lin_log_scale'] = True\n return varial.tools.Plotter(**kws)\n\n\ndef create_name(name):\n return name + 'v' + varial.settings.git_tag\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef select_histograms(wrp):\n use_this = True\n if use_cuts and all('NoGenSel-' + c not in wrp.in_file_path for c in\n current_cuts):\n use_this = False\n if wrp.name.startswith('cf_'):\n use_this = False\n if use_histos and all(c not in wrp.in_file_path for c in current_hists):\n use_this = False\n return use_this\n\n\ndef select_splithistograms(wrp):\n use_this = True\n if use_cuts and all('NoGenSel-' + c not in wrp.in_file_path for c in\n current_cuts):\n use_this = False\n if wrp.name.startswith('cf_'):\n use_this = False\n if use_histos and all(c not in wrp.in_file_path for c in current_hists):\n use_this = False\n return use_this\n\n\ndef norm_to_first_bin(wrp):\n histo = wrp.histo.Clone()\n firstbin = histo.GetBinContent(1)\n histo.Scale(1.0 / firstbin)\n info = wrp.all_info()\n info['lumi'] /= firstbin\n return varial.wrappers.HistoWrapper(histo, **info)\n\n\ndef norm_histos_to_first_bin(wrps):\n for wrp in wrps:\n if isinstance(wrp, varial.wrappers.HistoWrapper):\n yield norm_to_first_bin(wrp)\n else:\n yield wrp\n\n\ndef norm_histos_to_integral(wrps):\n for wrp in wrps:\n if isinstance(wrp, varial.wrappers.HistoWrapper):\n yield varial.operations.norm_to_integral(wrp)\n else:\n yield wrp\n\n\ndef label_axes(wrps):\n for w in wrps:\n if 'TH1' in w.type and w.histo.GetXaxis().GetTitle() == '':\n w.histo.GetXaxis().SetTitle(w.histo.GetTitle())\n w.histo.GetYaxis().SetTitle('events')\n w.histo.SetTitle('')\n yield w\n\n\ndef norm_cf_plots(wrps):\n for w in wrps:\n if w.name.startswith('cf_') and isinstance(w, varial.wrappers.\n HistoWrapper):\n yield varial.operations.norm_to_integral(w)\n else:\n yield w\n\n\ndef for_stacked_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = gen.gen_add_wrp_info(wrps, sample=lambda w: w.file_path.split(\n '.')[-2], analyzer=lambda w: w.in_file_path[0], legend=lambda w: w.\n sample, is_signal=lambda w: 'TpTp_M' in w.sample, lumi=lambda w: 1.0)\n wrps = label_axes(wrps)\n return wrps\n\n\n<mask token>\n\n\ndef do_nothing_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = label_axes(wrps)\n return wrps\n\n\ndef for_eff_plots_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = gen.gen_add_wrp_info(wrps, sample=lambda w: w.file_path.split(\n '.')[-2], analyzer=lambda w: w.in_file_path[0], legend=lambda w: (\n '100* ' if 'TpTp_M' in w.sample else '') + w.sample, is_signal=lambda\n w: 'TpTp_M' in w.sample, lumi=lambda w: 0.01 if 'TpTp_M' in w.\n sample else 1.0)\n wrps = gen.gen_make_eff_graphs(wrps)\n wrps = label_axes(wrps)\n return wrps\n\n\n<mask token>\n\n\ndef norm_cf_factory(**kws):\n kws['hook_loaded_histos'] = norm_cf_hook\n kws['save_lin_log_scale'] = True\n kws['save_name_func'] = lambda w: w.name + '_norm'\n return varial.tools.Plotter(**kws)\n\n\ndef do_nothing_factory(**kws):\n kws['hook_loaded_histos'] = do_nothing_hook\n kws['save_lin_log_scale'] = True\n return varial.tools.Plotter(**kws)\n\n\ndef for_eff_factory(**kws):\n kws['filter_keyfunc'] = lambda w: 'TH1' in w.type\n kws['hook_loaded_histos'] = for_eff_plots_hook\n kws['save_lin_log_scale'] = True\n return varial.tools.Plotter(**kws)\n\n\ndef create_name(name):\n return name + 'v' + varial.settings.git_tag\n\n\n<mask token>\n",
"step-4": "<mask token>\nROOT.gROOT.SetBatch()\nROOT.gROOT.ProcessLine('gErrorIgnoreLevel = kError;')\n<mask token>\ndirname = 'VLQToHiggsPairProd'\nvarial.settings.rootfile_postfixes = ['.png', '.pdf']\nvarial.settings.git_tag = varial.settings.readgittag('./GITTAGGER_LOG.txt')\ncurrent_tag = varial.settings.git_tag\nsmpls = list()\nsmpls.append(Sample(name='QCD', legend='QCD'))\nsmpls.append(Sample(name='TTJets', legend='TTJets'))\nsmpls.append(Sample(name='WJets', legend='WJets'))\nsmpls.append(Sample(name='ZJets', legend='ZJets'))\nanalysis.all_samples = dict((s.name, s) for s in smpls)\nvarial.settings.defaults_Legend['x_pos'] = 0.8\nvarial.settings.defaults_Legend['label_width'] = 0.36\nvarial.settings.defaults_Legend['label_height'] = 0.03\nvarial.settings.box_text_size = 0.03\nvarial.settings.colors = {'TTJets': 632, 'WJets': 878, 'ZJets': 596,\n 'TpTp_M1000': 870}\ncurrent_cuts = ['AfterPresel', 'FullSelection']\ncurrent_hists = ['/EventHists', '/MuonHists']\nuse_cuts = False\nuse_histos = False\nvarial.settings.stacking_order = ['ZJets', 'WJets', 'TTJets']\n\n\ndef select_histograms(wrp):\n use_this = True\n if use_cuts and all('NoGenSel-' + c not in wrp.in_file_path for c in\n current_cuts):\n use_this = False\n if wrp.name.startswith('cf_'):\n use_this = False\n if use_histos and all(c not in wrp.in_file_path for c in current_hists):\n use_this = False\n return use_this\n\n\ndef select_splithistograms(wrp):\n use_this = True\n if use_cuts and all('NoGenSel-' + c not in wrp.in_file_path for c in\n current_cuts):\n use_this = False\n if wrp.name.startswith('cf_'):\n use_this = False\n if use_histos and all(c not in wrp.in_file_path for c in current_hists):\n use_this = False\n return use_this\n\n\ndef norm_to_first_bin(wrp):\n histo = wrp.histo.Clone()\n firstbin = histo.GetBinContent(1)\n histo.Scale(1.0 / firstbin)\n info = wrp.all_info()\n info['lumi'] /= firstbin\n return varial.wrappers.HistoWrapper(histo, **info)\n\n\ndef norm_histos_to_first_bin(wrps):\n for wrp in wrps:\n if isinstance(wrp, varial.wrappers.HistoWrapper):\n yield norm_to_first_bin(wrp)\n else:\n yield wrp\n\n\ndef norm_histos_to_integral(wrps):\n for wrp in wrps:\n if isinstance(wrp, varial.wrappers.HistoWrapper):\n yield varial.operations.norm_to_integral(wrp)\n else:\n yield wrp\n\n\ndef label_axes(wrps):\n for w in wrps:\n if 'TH1' in w.type and w.histo.GetXaxis().GetTitle() == '':\n w.histo.GetXaxis().SetTitle(w.histo.GetTitle())\n w.histo.GetYaxis().SetTitle('events')\n w.histo.SetTitle('')\n yield w\n\n\ndef norm_cf_plots(wrps):\n for w in wrps:\n if w.name.startswith('cf_') and isinstance(w, varial.wrappers.\n HistoWrapper):\n yield varial.operations.norm_to_integral(w)\n else:\n yield w\n\n\ndef for_stacked_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = gen.gen_add_wrp_info(wrps, sample=lambda w: w.file_path.split(\n '.')[-2], analyzer=lambda w: w.in_file_path[0], legend=lambda w: w.\n sample, is_signal=lambda w: 'TpTp_M' in w.sample, lumi=lambda w: 1.0)\n wrps = label_axes(wrps)\n return wrps\n\n\ndef norm_cf_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = norm_histos_to_first_bin(wrps)\n wrps = label_axes(wrps)\n return wrps\n\n\ndef do_nothing_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = label_axes(wrps)\n return wrps\n\n\ndef for_eff_plots_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = gen.gen_add_wrp_info(wrps, sample=lambda w: 
w.file_path.split(\n '.')[-2], analyzer=lambda w: w.in_file_path[0], legend=lambda w: (\n '100* ' if 'TpTp_M' in w.sample else '') + w.sample, is_signal=lambda\n w: 'TpTp_M' in w.sample, lumi=lambda w: 0.01 if 'TpTp_M' in w.\n sample else 1.0)\n wrps = gen.gen_make_eff_graphs(wrps)\n wrps = label_axes(wrps)\n return wrps\n\n\ndef stack_histos_factory(**kws):\n kws['filter_keyfunc'] = lambda w: 'TH1' in w.type\n kws['hook_loaded_histos'] = for_stacked_hook\n kws['plot_setup'] = gen.mc_stack_n_data_sum\n kws['save_lin_log_scale'] = True\n return varial.tools.Plotter(**kws)\n\n\ndef norm_cf_factory(**kws):\n kws['hook_loaded_histos'] = norm_cf_hook\n kws['save_lin_log_scale'] = True\n kws['save_name_func'] = lambda w: w.name + '_norm'\n return varial.tools.Plotter(**kws)\n\n\ndef do_nothing_factory(**kws):\n kws['hook_loaded_histos'] = do_nothing_hook\n kws['save_lin_log_scale'] = True\n return varial.tools.Plotter(**kws)\n\n\ndef for_eff_factory(**kws):\n kws['filter_keyfunc'] = lambda w: 'TH1' in w.type\n kws['hook_loaded_histos'] = for_eff_plots_hook\n kws['save_lin_log_scale'] = True\n return varial.tools.Plotter(**kws)\n\n\ndef create_name(name):\n return name + 'v' + varial.settings.git_tag\n\n\ntagger = varial.tools.GitTagger('./GITTAGGER_LOG.txt')\ntagger.run()\np1 = varial.tools.mk_rootfile_plotter(name=create_name(dirname),\n filter_keyfunc=select_histograms, plotter_factory=stack_histos_factory,\n combine_files=True)\np2 = varial.tools.mk_rootfile_plotter(name=create_name(dirname),\n filter_keyfunc=lambda w: w.name.startswith('cf_') and not w.name.\n endswith('raw'), plotter_factory=norm_cf_factory, combine_files=True)\np3 = varial.tools.mk_rootfile_plotter(name=create_name(dirname),\n filter_keyfunc=lambda w: w.name.startswith('cf_') and not w.name.\n endswith('raw'), plotter_factory=do_nothing_factory, combine_files=True)\np4 = varial.tools.mk_rootfile_plotter(name=create_name(dirname) + 'split',\n pattern='v1.19_unmerged_files/*.root', filter_keyfunc=\n select_splithistograms, plotter_factory=for_eff_factory, combine_files=\n False)\np5 = varial.tools.mk_rootfile_plotter(name=create_name(dirname) + 'split',\n filter_keyfunc=select_splithistograms, plotter_factory=for_eff_factory,\n combine_files=True)\np6 = varial.tools.mk_rootfile_plotter(name=create_name(dirname) + 'split',\n pattern='v1.19_unmerged_files/*.root', filter_keyfunc=lambda w: w.name.\n startswith('cf_') and not w.name.endswith('raw'), plotter_factory=\n norm_cf_factory, combine_files=False)\np7 = varial.tools.mk_rootfile_plotter(name=create_name(dirname) + 'split',\n pattern='v1.19_unmerged_files/*.root', filter_keyfunc=lambda w: w.name.\n startswith('cf_') and not w.name.endswith('raw'), plotter_factory=\n do_nothing_factory, combine_files=False)\ntime.sleep(1)\np1.run()\np2.run()\np3.run()\np5.run()\nvarial.tools.WebCreator().run()\n",
"step-5": "#!/usr/bin/env python\n\nimport ROOT\nROOT.gROOT.SetBatch()\nROOT.gROOT.ProcessLine('gErrorIgnoreLevel = kError;')\n\nimport os\nimport time\nimport varial.tools\nimport varial.generators as gen\nimport itertools\nfrom varial.sample import Sample\nimport varial.analysis as analysis\n# import varial.toolinterface\n\ndirname = 'VLQToHiggsPairProd'\n\nvarial.settings.rootfile_postfixes = ['.png','.pdf']\n\nvarial.settings.git_tag = varial.settings.readgittag('./GITTAGGER_LOG.txt')\n\ncurrent_tag = varial.settings.git_tag\n\n# sample definitions\nsmpls = list()\n\n\nsmpls.append(Sample(\n name='QCD',\n legend='QCD'\n))\n\nsmpls.append(Sample(\n name='TTJets',\n legend='TTJets'\n))\n\nsmpls.append(Sample(\n name='WJets',\n legend='WJets'\n))\n\nsmpls.append(Sample(\n name='ZJets',\n legend='ZJets'\n))\n\nanalysis.all_samples = dict((s.name, s) for s in smpls)\n\nvarial.settings.defaults_Legend['x_pos'] = 0.80\nvarial.settings.defaults_Legend['label_width'] = 0.36\nvarial.settings.defaults_Legend['label_height'] = 0.03\n# varial.settings.debug_mode = True\nvarial.settings.box_text_size = 0.03\nvarial.settings.colors = {\n 'TTJets': 632, \n 'WJets': 878,\n 'ZJets': 596, \n 'TpTp_M1000': 870, \n # 'TpJ_TH_M800_NonTlep': 434,\n}\n\n# SELECT HISTOGRAMS TO PLOT HERE!\n\n# use these functions to specifically select histograms for plotting\ncurrent_cuts = ['AfterPresel', 'FullSelection'] # 'Nminus1-MuonPtCut', 'OneCut-HTCut', 'FullSelection', 'Nminus1-6OneHiggsTagCut'\ncurrent_hists = ['/EventHists', '/MuonHists'] # \"/ElectronHists\", '/MuonHists', '/JetHists', '/TopJetHists', '/EventHists', '/GenHists/w_decay_lin', '/GenHists/w_decay_log'\n\nuse_cuts = False\nuse_histos = False\n\nvarial.settings.stacking_order = ['ZJets', 'WJets', 'TTJets']\n\ndef select_histograms(wrp):\n use_this = True\n if use_cuts and all('NoGenSel-'+c not in wrp.in_file_path for c in current_cuts):\n use_this = False\n if wrp.name.startswith('cf_'):\n use_this = False\n if use_histos and all(c not in wrp.in_file_path for c in current_hists):\n use_this = False\n # if ('GenHists' in wrp.in_file_path and not (wrp.name.startswith('mu_') or wrp.name.startswith('genjet_'))):\n # use_this = False\n # if 'GenHists' in wrp.in_file_path and ('NoCuts' not in wrp.in_file_path and 'Nminus1-BTagCut' not in wrp.in_file_path):\n # use_this = False\n return use_this\n\ndef select_splithistograms(wrp):\n use_this = True\n if use_cuts and all('NoGenSel-'+c not in wrp.in_file_path for c in current_cuts):\n use_this = False\n if wrp.name.startswith('cf_'):\n use_this = False\n if use_histos and all(c not in wrp.in_file_path for c in current_hists):\n use_this = False\n # if ('GenHists' in wrp.in_file_path and not (wrp.name.startswith('mu_') or wrp.name.startswith('genjet_'))):\n # use_this = False\n # if 'GenHists' in wrp.in_file_path and ('NoCuts' not in wrp.in_file_path and 'Nminus1-BTagCut' not in wrp.in_file_path):\n # use_this = False\n return use_this \n\n\n\n# SOME FUNCTIONS TO MANIPULATE HISTOGRAMS\n\ndef norm_to_first_bin(wrp):\n histo = wrp.histo.Clone()\n firstbin = histo.GetBinContent(1)\n histo.Scale(1. 
/ firstbin)\n info = wrp.all_info()\n info[\"lumi\"] /= firstbin\n return varial.wrappers.HistoWrapper(histo, **info)\n\ndef norm_histos_to_first_bin(wrps):\n for wrp in wrps:\n if isinstance(wrp, varial.wrappers.HistoWrapper):\n yield norm_to_first_bin(wrp)\n else:\n yield wrp\n\ndef norm_histos_to_integral(wrps):\n for wrp in wrps:\n if isinstance(wrp, varial.wrappers.HistoWrapper):\n yield varial.operations.norm_to_integral(wrp)\n else:\n yield wrp\n\n\ndef label_axes(wrps):\n for w in wrps:\n if 'TH1' in w.type and w.histo.GetXaxis().GetTitle() == '':\n w.histo.GetXaxis().SetTitle(w.histo.GetTitle())\n w.histo.GetYaxis().SetTitle('events')\n w.histo.SetTitle('')\n yield w\n\ndef norm_cf_plots(wrps):\n for w in wrps:\n if w.name.startswith('cf_') and isinstance(w, varial.wrappers.HistoWrapper):\n yield varial.operations.norm_to_integral(w)\n else:\n yield w\n\n\n# HOOK FUNCTIONS FOR PLOTTER_FACTORIES; manipulate histograms here\n\ndef for_stacked_hook(wrps):\n # wrps = norm_cf_plots(wrps)\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = gen.gen_add_wrp_info(\n wrps,\n sample=lambda w: w.file_path.split('.')[-2],\n analyzer=lambda w: w.in_file_path[0],\n legend=lambda w: w.sample,\n is_signal=lambda w: 'TpTp_M' in w.sample,\n lumi=lambda w: 1.\n )\n # wrps = gen.imap_conditional(wrps, lambda w: 'TpJ_TH_M800' in w.sample, gen.op.norm_to_lumi)\n wrps = label_axes(wrps)\n return wrps\n\ndef norm_cf_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = norm_histos_to_first_bin(wrps)\n wrps = label_axes(wrps)\n return wrps\n\ndef do_nothing_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = label_axes(wrps)\n return wrps\n\ndef for_eff_plots_hook(wrps):\n wrps = itertools.ifilter(lambda w: w.histo.Integral(), wrps)\n wrps = gen.gen_add_wrp_info(\n wrps,\n sample=lambda w: w.file_path.split('.')[-2],\n analyzer=lambda w: w.in_file_path[0],\n legend=lambda w: ('100* ' if 'TpTp_M' in w.sample else '') + w.sample,\n is_signal=lambda w: 'TpTp_M' in w.sample,\n lumi=lambda w: 0.01 if 'TpTp_M' in w.sample else 1.\n )\n wrps = gen.gen_make_eff_graphs(wrps)\n wrps = label_axes(wrps)\n return wrps\n\n\n# def calc_stack_order(wrps):\n# for w in wrps:\n\n\n# def stack_by_max(wrps):\n# wrps = calc_stack_order(wrps)\n# wrps = gen.mc_stack_n_data_sum(wrps)\n# return wrps\n\n\n# PLOTTER FACTORIES; select here in general which histograms to plot, how to manipulate them a.s.o.\n\ndef stack_histos_factory(**kws):\n kws['filter_keyfunc'] = lambda w: 'TH1' in w.type\n kws['hook_loaded_histos'] = for_stacked_hook\n kws['plot_setup'] = gen.mc_stack_n_data_sum\n kws['save_lin_log_scale'] = True\n # kws['save_log_scale'] = True\n # kws['hook_canvas_pre_build'] = canvas_hook\n # kws['hook_canvas_post_build'] = canvas_hook\n return varial.tools.Plotter(**kws)\n\ndef norm_cf_factory(**kws):\n # kws['filter_keyfunc'] = lambda w: 'TH1' in w.type\n kws['hook_loaded_histos'] = norm_cf_hook\n kws['save_lin_log_scale'] = True\n kws['save_name_func'] = lambda w : w.name + '_norm'\n # kws['save_log_scale'] = True\n # kws['hook_canvas_pre_build'] = canvas_hook\n # kws['hook_canvas_post_build'] = canvas_hook\n return varial.tools.Plotter(**kws)\n\ndef do_nothing_factory(**kws):\n # kws['filter_keyfunc'] = lambda w: 'TH1' in w.type\n kws['hook_loaded_histos'] = do_nothing_hook\n kws['save_lin_log_scale'] = True\n # kws['save_log_scale'] = True\n # kws['hook_canvas_pre_build'] = canvas_hook\n # kws['hook_canvas_post_build'] = canvas_hook\n return 
varial.tools.Plotter(**kws)\n\ndef for_eff_factory(**kws):\n kws['filter_keyfunc'] = lambda w: 'TH1' in w.type\n kws['hook_loaded_histos'] = for_eff_plots_hook\n kws['save_lin_log_scale'] = True\n # kws['save_log_scale'] = True\n # kws['hook_canvas_pre_build'] = canvas_hook\n # kws['hook_canvas_post_build'] = canvas_hook\n return varial.tools.Plotter(**kws)\n\ndef create_name(name):\n return name+'v'+varial.settings.git_tag\n\n \n\ntagger = varial.tools.GitTagger('./GITTAGGER_LOG.txt')\n\ntagger.run()\n\n\n\np1 = varial.tools.mk_rootfile_plotter(\n name=create_name(dirname),\n # filter_keyfunc=lambda w: not w.name.startswith('cf_'),\n filter_keyfunc=select_histograms,\n plotter_factory=stack_histos_factory,\n combine_files=True\n)\n\np2 = varial.tools.mk_rootfile_plotter(\n name=create_name(dirname),\n filter_keyfunc=lambda w: w.name.startswith('cf_') and not w.name.endswith('raw'),\n plotter_factory=norm_cf_factory,\n combine_files=True\n)\n\np3 = varial.tools.mk_rootfile_plotter(\n name=create_name(dirname),\n filter_keyfunc=lambda w: w.name.startswith('cf_') and not w.name.endswith('raw'),\n plotter_factory=do_nothing_factory,\n combine_files=True\n)\n\np4 = varial.tools.mk_rootfile_plotter(\n name=create_name(dirname)+'split',\n pattern='v1.19_unmerged_files/*.root',\n filter_keyfunc=select_splithistograms,\n plotter_factory=for_eff_factory,\n combine_files=False\n)\n\np5 = varial.tools.mk_rootfile_plotter(\n name=create_name(dirname)+'split',\n # filter_keyfunc=lambda w: not w.name.startswith('cf_'),\n filter_keyfunc=select_splithistograms,\n plotter_factory=for_eff_factory,\n combine_files=True\n)\n\np6 = varial.tools.mk_rootfile_plotter(\n name=create_name(dirname)+'split',\n pattern='v1.19_unmerged_files/*.root',\n filter_keyfunc=lambda w: w.name.startswith('cf_') and not w.name.endswith('raw'),\n plotter_factory=norm_cf_factory,\n combine_files=False\n)\n\np7 = varial.tools.mk_rootfile_plotter(\n name=create_name(dirname)+'split',\n pattern='v1.19_unmerged_files/*.root',\n filter_keyfunc=lambda w: w.name.startswith('cf_') and not w.name.endswith('raw'),\n plotter_factory=do_nothing_factory,\n combine_files=False\n)\n\ntime.sleep(1)\np1.run()\np2.run()\np3.run()\n# p4.run()\np5.run()\n# p6.run()\n# p7.run()\nvarial.tools.WebCreator().run()\n# os.system('rm -r ~/www/TprimeAnalysis/%s' % create_name(dirname))\n# os.system('cp -r %s ~/www/TprimeAnalysis/' % create_name(dirname))\n",
"step-ids": [
10,
13,
14,
18,
20
]
}
|
[
10,
13,
14,
18,
20
] |
#!/usr/bin/python
# ~~~~~============== HOW TO RUN ==============~~~~~
# 1) Configure things in CONFIGURATION section
# 2) Change permissions: chmod +x bot.py
# 3) Run in loop: while true; do ./bot.py; sleep 1; done
from __future__ import print_function
import sys
import socket
import json
import time
# ~~~~~============== CONFIGURATION ==============~~~~~
# replace REPLACEME with your team name!
team_name="BULBASAUR"
# This variable dictates whether or not the bot is connecting to the prod
# or test exchange. Be careful with this switch!
test_mode = True
# This setting changes which test exchange is connected to.
# 0 is prod-like
# 1 is slower
# 2 is empty
test_exchange_index=0
prod_exchange_hostname="production"
port=25000 + (test_exchange_index if test_mode else 0)
exchange_hostname = "test-exch-" + team_name if test_mode else prod_exchange_hostname
# ~~~~~============== NETWORKING CODE ==============~~~~~
def connect():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((exchange_hostname, port))
return s.makefile('rw', 1)
def write_to_exchange(exchange, obj):
json.dump(obj, exchange)
exchange.write("\n")
def read_from_exchange(exchange):
return json.loads(exchange.readline())
# ~~~~~============== MAIN LOOP ==============~~~~~
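# shared mutable state for the trading loop: order counter, pending orders per symbol, positions, and cached best quotes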
exchange = None
orders_placed = 0
pending_orders = []
pending_buy_orders = {"BOND": 0, "VALBZ": 0, "VALE": 0, "XLF": 0}
pending_sell_orders = {"BOND": 0, "VALBZ": 0, "VALE": 0, "XLF": 0}
positions = {"BOND": 0, "VALBZ": 0, "VALE": 0, "XLF": 0}
vale_buy_pending_id = None
vale_sell_pending_id = None
vale_sell = 0
vale_buy = 0
xlf_buy_pending_id = None
xlf_sell_pending_id = None
xlf_sell = 0
xlf_buy = 0
def main():
global exchange
exchange = connect()
hello()
hello_from_exchange = read_from_exchange(exchange)
# A common mistake people make is to call write_to_exchange() > 1
# time for every read_from_exchange() response.
# Since many write messages generate marketdata, this will cause an
# exponential explosion in pending messages. Please, don't do that!
print("The exchange replied:", hello_from_exchange, file=sys.stderr)
global positions
positions["BOND"] = hello_from_exchange["symbols"][0]["position"]
positions["VALE"] = hello_from_exchange["symbols"][5]["position"]
positions["VALBZ"] = hello_from_exchange["symbols"][4]["position"]
positions["XLF"] = hello_from_exchange["symbols"][7]["position"]
add("BOND", "BUY", 999, 100 - positions["BOND"])
add("BOND", "SELL", 1001, 100 + positions["BOND"])
while (True):
server_msg = read_from_exchange(exchange)
buy_sell_vale()
buy_sell_xlf()
listen_for_fills(server_msg)
listen_for_book(server_msg)
listen_for_errors(server_msg)
def hello():
write_to_exchange(exchange, {"type": "hello", "team": team_name.upper()})
def add(symbol, direction, price, size):
# Update order id to be order placed number
global orders_placed
orders_placed += 1
# Add to pending orders list
global pending_orders
pending_orders.append(orders_placed)
#if symbol == "VALE":
print("Order Placed: " + str(orders_placed) + " Position: " + str(positions[symbol])+ " Size: " + str(size) + " Dir: " + direction + " Symbol: " + symbol + " Price: " + str(price) + "")
# Increment Buy Orders If Necessary
if (direction == "BUY"):
global pending_buy_orders
pending_buy_orders[symbol] += size
elif (direction == "SELL"):
global pending_sell_orders
pending_sell_orders[symbol] += size
# Add order to exchange
write_to_exchange(exchange, {"type": "add", "order_id": orders_placed, "symbol": symbol,
"dir":direction, "price":price, "size": size })
#
read_from_exchange(exchange)
def cancel(order_id):
write_to_exchange(exchange, {"type": "cancel", "order_id": order_id})
def listen_for_fills(server_msg):
if (server_msg["type"] == "fill"):
# Get info of filled order
order_num = server_msg["order_id"]
symbol = server_msg["symbol"]
size = server_msg["size"]
direction = server_msg["dir"]
global positions
# Update bond order fill and buy/sell as necessary
if (symbol == "BOND"):
# print("Bond Order Partially Filled: " + str(order_num))
if (direction == "BUY"):
pending_buy_orders[symbol] -= size
add("BOND", "SELL", 1001, size)
elif (direction == "SELL"):
pending_sell_orders[symbol] -= size
add("BOND", "BUY", 999, size)
# Update Vale Order fill and hedge as necessary
if (symbol == "VALE"):
print("Vale Order Filled: " + str(order_num) + " " + direction + " Size: " + str(size))
if (direction == "BUY"):
pending_buy_orders[symbol] -= size
positions["VALE"] += size
elif (direction == "SELL"):
positions["VALE"] -= size
pending_sell_orders[symbol] -= size
if (symbol == "XLF"):
print("XLF Order Filled: " + str(order_num) + " " + direction + " Size: " + str(size))
if (direction == "BUY"):
pending_buy_orders[symbol] -= size
positions["XLF"] += size
elif (direction == "SELL"):
positions["XLF"] -= size
pending_sell_orders[symbol] -= size
def listen_for_book(server_msg):
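    # cache the current best bid/ask prices for VALE and XLF from book snapshots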
if (server_msg["type"] == "book"):
global vale_sell
global vale_buy
global xlf_sell
global xlf_buy
if (server_msg["symbol"] == "VALE"):
if len(server_msg["sell"]) > 0:
vale_sell = server_msg["sell"][0][0]
if len(server_msg["buy"]) > 0:
vale_buy = server_msg["buy"][0][0]
if (server_msg["symbol"] == "XLF"):
if len(server_msg["sell"]) > 0:
xlf_sell = server_msg["sell"][0][0]
if len(server_msg["buy"]) > 0:
xlf_buy = server_msg["buy"][0][0]
def buy_sell_vale():
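    # maintain one resting VALE order, quoting one tick inside the current spread, with position capped at +/-10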
if vale_buy > 0 and vale_sell > 0:
global pending_sell_orders
global pending_buy_orders
if ( pending_buy_orders["VALE"] + positions["VALE"] < 10):
global vale_buy_pending_id
if vale_buy_pending_id:
cancel(vale_buy_pending_id)
pending_buy_orders["VALE"] = 0
vale_buy_pending_id = None
print("Cancel VALE BUY Order: " + str(orders_placed))
time.sleep(1)
num_stock = 10 - positions["VALE"]
            add("VALE", "BUY", vale_buy + 1, num_stock)
vale_buy_pending_id = orders_placed
elif (positions["VALE"] - pending_sell_orders["VALE"] > -10):
global vale_sell_pending_id
if vale_sell_pending_id:
print("Cancel VALE Sell Order: " + str(orders_placed))
cancel(vale_sell_pending_id)
pending_sell_orders["VALE"] = 0
vale_sell_pending_id = None
time.sleep(1)
            num_stock = 10 + positions["VALE"] # size toward the -10 cap, mirroring the XLF sell sizing
add("VALE", "SELL", vale_sell - 1, num_stock)
vale_sell_pending_id = orders_placed
def buy_sell_xlf():
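    # same quoting logic as buy_sell_vale, but for XLF with a +/-100 position cap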
if xlf_buy > 0 and xlf_sell > 0:
global pending_sell_orders
global pending_buy_orders
if ( pending_buy_orders["XLF"] + positions["XLF"] < 100):
global xlf_buy_pending_id
if xlf_buy_pending_id:
cancel(xlf_buy_pending_id)
pending_buy_orders["XLF"] = 0
xlf_buy_pending_id = None
print("Cancel XLF Order: " + str(orders_placed))
time.sleep(1)
add("XLF", "BUY", xlf_buy + 1, 100 - positions["XLF"])
xlf_buy_pending_id = orders_placed
elif (positions["XLF"] - pending_sell_orders["XLF"] > -100):
global xlf_sell_pending_id
if xlf_sell_pending_id:
print("Cancel XLF Order: " + str(orders_placed))
cancel(xlf_sell_pending_id)
pending_sell_orders["XLF"] = 0
xlf_sell_pending_id = None
time.sleep(1)
add("XLF", "SELL", xlf_sell - 1, 100 + positions["XLF"])
xlf_sell_pending_id = orders_placed
def listen_for_errors(server_msg):
if (server_msg["type"] == "reject"):
print("ERROR: ORDER FAILED, id: " + str(server_msg["order_id"]) + " " + server_msg["error"])
if (server_msg["type"] == "error"):
        print("ERROR: ORDER FAILED, id: " + str(server_msg.get("order_id", "?")) + " " + server_msg["error"]) # the bare builtin `id` was printed here before
if (server_msg["type"] == "ack"):
print("Order Completed: " + str(server_msg["order_id"]))
if (server_msg["type"] == "out"):
print("Order Successfully Canceled: " + str(server_msg["order_id"]))
#add("BOND", "BUY", 999, 100 - positions["BOND"])
#add("BOND", "SELL", 1001, 100 + positions["BOND"])
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "56c5c515de8490f2e3516563e037c375aba03667",
"index": 3221,
"step-1": "<mask token>\n\n\ndef connect():\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((exchange_hostname, port))\n return s.makefile('rw', 1)\n\n\ndef write_to_exchange(exchange, obj):\n json.dump(obj, exchange)\n exchange.write('\\n')\n\n\n<mask token>\n\n\ndef hello():\n write_to_exchange(exchange, {'type': 'hello', 'team': team_name.upper()})\n\n\ndef add(symbol, direction, price, size):\n global orders_placed\n orders_placed += 1\n global pending_orders\n pending_orders.append(orders_placed)\n print('Order Placed: ' + str(orders_placed) + ' Position: ' + str(\n positions[symbol]) + ' Size: ' + str(size) + ' Dir: ' + direction +\n ' Symbol: ' + symbol + ' Price: ' + str(price) + '')\n if direction == 'BUY':\n global pending_buy_orders\n pending_buy_orders[symbol] += size\n elif direction == 'SELL':\n global pending_sell_orders\n pending_sell_orders[symbol] += size\n write_to_exchange(exchange, {'type': 'add', 'order_id': orders_placed,\n 'symbol': symbol, 'dir': direction, 'price': price, 'size': size})\n read_from_exchange(exchange)\n\n\n<mask token>\n\n\ndef buy_sell_xlf():\n if xlf_buy > 0 and xlf_sell > 0:\n global pending_sell_orders\n global pending_buy_orders\n if pending_buy_orders['XLF'] + positions['XLF'] < 100:\n global xlf_buy_pending_id\n if xlf_buy_pending_id:\n cancel(xlf_buy_pending_id)\n pending_buy_orders['XLF'] = 0\n xlf_buy_pending_id = None\n print('Cancel XLF Order: ' + str(orders_placed))\n time.sleep(1)\n add('XLF', 'BUY', xlf_buy + 1, 100 - positions['XLF'])\n xlf_buy_pending_id = orders_placed\n elif positions['XLF'] - pending_sell_orders['XLF'] > -100:\n global xlf_sell_pending_id\n if xlf_sell_pending_id:\n print('Cancel XLF Order: ' + str(orders_placed))\n cancel(xlf_sell_pending_id)\n pending_sell_orders['XLF'] = 0\n xlf_sell_pending_id = None\n time.sleep(1)\n add('XLF', 'SELL', xlf_sell - 1, 100 + positions['XLF'])\n xlf_sell_pending_id = orders_placed\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef connect():\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((exchange_hostname, port))\n return s.makefile('rw', 1)\n\n\ndef write_to_exchange(exchange, obj):\n json.dump(obj, exchange)\n exchange.write('\\n')\n\n\ndef read_from_exchange(exchange):\n return json.loads(exchange.readline())\n\n\n<mask token>\n\n\ndef hello():\n write_to_exchange(exchange, {'type': 'hello', 'team': team_name.upper()})\n\n\ndef add(symbol, direction, price, size):\n global orders_placed\n orders_placed += 1\n global pending_orders\n pending_orders.append(orders_placed)\n print('Order Placed: ' + str(orders_placed) + ' Position: ' + str(\n positions[symbol]) + ' Size: ' + str(size) + ' Dir: ' + direction +\n ' Symbol: ' + symbol + ' Price: ' + str(price) + '')\n if direction == 'BUY':\n global pending_buy_orders\n pending_buy_orders[symbol] += size\n elif direction == 'SELL':\n global pending_sell_orders\n pending_sell_orders[symbol] += size\n write_to_exchange(exchange, {'type': 'add', 'order_id': orders_placed,\n 'symbol': symbol, 'dir': direction, 'price': price, 'size': size})\n read_from_exchange(exchange)\n\n\ndef cancel(order_id):\n write_to_exchange(exchange, {'type': 'cancel', 'order_id': order_id})\n\n\ndef listen_for_fills(server_msg):\n if server_msg['type'] == 'fill':\n order_num = server_msg['order_id']\n symbol = server_msg['symbol']\n size = server_msg['size']\n direction = server_msg['dir']\n global positions\n if symbol == 'BOND':\n if direction == 'BUY':\n pending_buy_orders[symbol] -= size\n add('BOND', 'SELL', 1001, size)\n elif direction == 'SELL':\n pending_sell_orders[symbol] -= size\n add('BOND', 'BUY', 999, size)\n if symbol == 'VALE':\n print('Vale Order Filled: ' + str(order_num) + ' ' + direction +\n ' Size: ' + str(size))\n if direction == 'BUY':\n pending_buy_orders[symbol] -= size\n positions['VALE'] += size\n elif direction == 'SELL':\n positions['VALE'] -= size\n pending_sell_orders[symbol] -= size\n if symbol == 'XLF':\n print('XLF Order Filled: ' + str(order_num) + ' ' + direction +\n ' Size: ' + str(size))\n if direction == 'BUY':\n pending_buy_orders[symbol] -= size\n positions['XLF'] += size\n elif direction == 'SELL':\n positions['XLF'] -= size\n pending_sell_orders[symbol] -= size\n\n\n<mask token>\n\n\ndef buy_sell_xlf():\n if xlf_buy > 0 and xlf_sell > 0:\n global pending_sell_orders\n global pending_buy_orders\n if pending_buy_orders['XLF'] + positions['XLF'] < 100:\n global xlf_buy_pending_id\n if xlf_buy_pending_id:\n cancel(xlf_buy_pending_id)\n pending_buy_orders['XLF'] = 0\n xlf_buy_pending_id = None\n print('Cancel XLF Order: ' + str(orders_placed))\n time.sleep(1)\n add('XLF', 'BUY', xlf_buy + 1, 100 - positions['XLF'])\n xlf_buy_pending_id = orders_placed\n elif positions['XLF'] - pending_sell_orders['XLF'] > -100:\n global xlf_sell_pending_id\n if xlf_sell_pending_id:\n print('Cancel XLF Order: ' + str(orders_placed))\n cancel(xlf_sell_pending_id)\n pending_sell_orders['XLF'] = 0\n xlf_sell_pending_id = None\n time.sleep(1)\n add('XLF', 'SELL', xlf_sell - 1, 100 + positions['XLF'])\n xlf_sell_pending_id = orders_placed\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef connect():\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((exchange_hostname, port))\n return s.makefile('rw', 1)\n\n\ndef write_to_exchange(exchange, obj):\n json.dump(obj, exchange)\n exchange.write('\\n')\n\n\ndef read_from_exchange(exchange):\n return json.loads(exchange.readline())\n\n\n<mask token>\n\n\ndef hello():\n write_to_exchange(exchange, {'type': 'hello', 'team': team_name.upper()})\n\n\ndef add(symbol, direction, price, size):\n global orders_placed\n orders_placed += 1\n global pending_orders\n pending_orders.append(orders_placed)\n print('Order Placed: ' + str(orders_placed) + ' Position: ' + str(\n positions[symbol]) + ' Size: ' + str(size) + ' Dir: ' + direction +\n ' Symbol: ' + symbol + ' Price: ' + str(price) + '')\n if direction == 'BUY':\n global pending_buy_orders\n pending_buy_orders[symbol] += size\n elif direction == 'SELL':\n global pending_sell_orders\n pending_sell_orders[symbol] += size\n write_to_exchange(exchange, {'type': 'add', 'order_id': orders_placed,\n 'symbol': symbol, 'dir': direction, 'price': price, 'size': size})\n read_from_exchange(exchange)\n\n\ndef cancel(order_id):\n write_to_exchange(exchange, {'type': 'cancel', 'order_id': order_id})\n\n\ndef listen_for_fills(server_msg):\n if server_msg['type'] == 'fill':\n order_num = server_msg['order_id']\n symbol = server_msg['symbol']\n size = server_msg['size']\n direction = server_msg['dir']\n global positions\n if symbol == 'BOND':\n if direction == 'BUY':\n pending_buy_orders[symbol] -= size\n add('BOND', 'SELL', 1001, size)\n elif direction == 'SELL':\n pending_sell_orders[symbol] -= size\n add('BOND', 'BUY', 999, size)\n if symbol == 'VALE':\n print('Vale Order Filled: ' + str(order_num) + ' ' + direction +\n ' Size: ' + str(size))\n if direction == 'BUY':\n pending_buy_orders[symbol] -= size\n positions['VALE'] += size\n elif direction == 'SELL':\n positions['VALE'] -= size\n pending_sell_orders[symbol] -= size\n if symbol == 'XLF':\n print('XLF Order Filled: ' + str(order_num) + ' ' + direction +\n ' Size: ' + str(size))\n if direction == 'BUY':\n pending_buy_orders[symbol] -= size\n positions['XLF'] += size\n elif direction == 'SELL':\n positions['XLF'] -= size\n pending_sell_orders[symbol] -= size\n\n\ndef listen_for_book(server_msg):\n if server_msg['type'] == 'book':\n global vale_sell\n global vale_buy\n global xlf_sell\n global xlf_buy\n if server_msg['symbol'] == 'VALE':\n if len(server_msg['sell']) > 0:\n vale_sell = server_msg['sell'][0][0]\n if len(server_msg['buy']) > 0:\n vale_buy = server_msg['buy'][0][0]\n if server_msg['symbol'] == 'XLF':\n if len(server_msg['sell']) > 0:\n xlf_sell = server_msg['sell'][0][0]\n if len(server_msg['buy']) > 0:\n xlf_buy = server_msg['buy'][0][0]\n\n\n<mask token>\n\n\ndef buy_sell_xlf():\n if xlf_buy > 0 and xlf_sell > 0:\n global pending_sell_orders\n global pending_buy_orders\n if pending_buy_orders['XLF'] + positions['XLF'] < 100:\n global xlf_buy_pending_id\n if xlf_buy_pending_id:\n cancel(xlf_buy_pending_id)\n pending_buy_orders['XLF'] = 0\n xlf_buy_pending_id = None\n print('Cancel XLF Order: ' + str(orders_placed))\n time.sleep(1)\n add('XLF', 'BUY', xlf_buy + 1, 100 - positions['XLF'])\n xlf_buy_pending_id = orders_placed\n elif positions['XLF'] - pending_sell_orders['XLF'] > -100:\n global xlf_sell_pending_id\n if xlf_sell_pending_id:\n print('Cancel XLF Order: ' + str(orders_placed))\n cancel(xlf_sell_pending_id)\n pending_sell_orders['XLF'] = 0\n xlf_sell_pending_id = None\n 
time.sleep(1)\n add('XLF', 'SELL', xlf_sell - 1, 100 + positions['XLF'])\n xlf_sell_pending_id = orders_placed\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef connect():\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((exchange_hostname, port))\n return s.makefile('rw', 1)\n\n\ndef write_to_exchange(exchange, obj):\n json.dump(obj, exchange)\n exchange.write('\\n')\n\n\ndef read_from_exchange(exchange):\n return json.loads(exchange.readline())\n\n\n<mask token>\n\n\ndef hello():\n write_to_exchange(exchange, {'type': 'hello', 'team': team_name.upper()})\n\n\ndef add(symbol, direction, price, size):\n global orders_placed\n orders_placed += 1\n global pending_orders\n pending_orders.append(orders_placed)\n print('Order Placed: ' + str(orders_placed) + ' Position: ' + str(\n positions[symbol]) + ' Size: ' + str(size) + ' Dir: ' + direction +\n ' Symbol: ' + symbol + ' Price: ' + str(price) + '')\n if direction == 'BUY':\n global pending_buy_orders\n pending_buy_orders[symbol] += size\n elif direction == 'SELL':\n global pending_sell_orders\n pending_sell_orders[symbol] += size\n write_to_exchange(exchange, {'type': 'add', 'order_id': orders_placed,\n 'symbol': symbol, 'dir': direction, 'price': price, 'size': size})\n read_from_exchange(exchange)\n\n\ndef cancel(order_id):\n write_to_exchange(exchange, {'type': 'cancel', 'order_id': order_id})\n\n\ndef listen_for_fills(server_msg):\n if server_msg['type'] == 'fill':\n order_num = server_msg['order_id']\n symbol = server_msg['symbol']\n size = server_msg['size']\n direction = server_msg['dir']\n global positions\n if symbol == 'BOND':\n if direction == 'BUY':\n pending_buy_orders[symbol] -= size\n add('BOND', 'SELL', 1001, size)\n elif direction == 'SELL':\n pending_sell_orders[symbol] -= size\n add('BOND', 'BUY', 999, size)\n if symbol == 'VALE':\n print('Vale Order Filled: ' + str(order_num) + ' ' + direction +\n ' Size: ' + str(size))\n if direction == 'BUY':\n pending_buy_orders[symbol] -= size\n positions['VALE'] += size\n elif direction == 'SELL':\n positions['VALE'] -= size\n pending_sell_orders[symbol] -= size\n if symbol == 'XLF':\n print('XLF Order Filled: ' + str(order_num) + ' ' + direction +\n ' Size: ' + str(size))\n if direction == 'BUY':\n pending_buy_orders[symbol] -= size\n positions['XLF'] += size\n elif direction == 'SELL':\n positions['XLF'] -= size\n pending_sell_orders[symbol] -= size\n\n\ndef listen_for_book(server_msg):\n if server_msg['type'] == 'book':\n global vale_sell\n global vale_buy\n global xlf_sell\n global xlf_buy\n if server_msg['symbol'] == 'VALE':\n if len(server_msg['sell']) > 0:\n vale_sell = server_msg['sell'][0][0]\n if len(server_msg['buy']) > 0:\n vale_buy = server_msg['buy'][0][0]\n if server_msg['symbol'] == 'XLF':\n if len(server_msg['sell']) > 0:\n xlf_sell = server_msg['sell'][0][0]\n if len(server_msg['buy']) > 0:\n xlf_buy = server_msg['buy'][0][0]\n\n\n<mask token>\n\n\ndef buy_sell_xlf():\n if xlf_buy > 0 and xlf_sell > 0:\n global pending_sell_orders\n global pending_buy_orders\n if pending_buy_orders['XLF'] + positions['XLF'] < 100:\n global xlf_buy_pending_id\n if xlf_buy_pending_id:\n cancel(xlf_buy_pending_id)\n pending_buy_orders['XLF'] = 0\n xlf_buy_pending_id = None\n print('Cancel XLF Order: ' + str(orders_placed))\n time.sleep(1)\n add('XLF', 'BUY', xlf_buy + 1, 100 - positions['XLF'])\n xlf_buy_pending_id = orders_placed\n elif positions['XLF'] - pending_sell_orders['XLF'] > -100:\n global xlf_sell_pending_id\n if xlf_sell_pending_id:\n print('Cancel XLF Order: ' + str(orders_placed))\n cancel(xlf_sell_pending_id)\n pending_sell_orders['XLF'] = 0\n xlf_sell_pending_id = None\n 
time.sleep(1)\n add('XLF', 'SELL', xlf_sell - 1, 100 + positions['XLF'])\n xlf_sell_pending_id = orders_placed\n\n\ndef listen_for_errors(server_msg):\n if server_msg['type'] == 'reject':\n print('ERROR: ORDER FAILED, id: ' + str(server_msg['order_id']) +\n ' ' + server_msg['error'])\n if server_msg['type'] == 'error':\n print('ERROR: ORDER FAILED, id: ' + str(id) + ' ' + server_msg['error']\n )\n if server_msg['type'] == 'ack':\n print('Order Completed: ' + str(server_msg['order_id']))\n if server_msg['type'] == 'out':\n print('Order Successfully Canceled: ' + str(server_msg['order_id']))\n\n\n<mask token>\n",
"step-5": "#!/usr/bin/python\n\n# ~~~~~============== HOW TO RUN ==============~~~~~\n# 1) Configure things in CONFIGURATION section\n# 2) Change permissions: chmod +x bot.py\n# 3) Run in loop: while true; do ./bot.py; sleep 1; done\n\nfrom __future__ import print_function\n\nimport sys\nimport socket\nimport json\nimport time\n\n# ~~~~~============== CONFIGURATION ==============~~~~~\n# replace REPLACEME with your team name!\nteam_name=\"BULBASAUR\"\n# This variable dictates whether or not the bot is connecting to the prod\n# or test exchange. Be careful with this switch!\ntest_mode = True\n\n# This setting changes which test exchange is connected to.\n# 0 is prod-like\n# 1 is slower\n# 2 is empty\ntest_exchange_index=0\nprod_exchange_hostname=\"production\"\n\nport=25000 + (test_exchange_index if test_mode else 0)\nexchange_hostname = \"test-exch-\" + team_name if test_mode else prod_exchange_hostname\n\n# ~~~~~============== NETWORKING CODE ==============~~~~~\ndef connect():\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((exchange_hostname, port))\n return s.makefile('rw', 1)\n\ndef write_to_exchange(exchange, obj):\n json.dump(obj, exchange)\n exchange.write(\"\\n\")\n\ndef read_from_exchange(exchange):\n return json.loads(exchange.readline())\n\n\n# ~~~~~============== MAIN LOOP ==============~~~~~\nexchange = None\norders_placed = 0\npending_orders = []\npending_buy_orders = {\"BOND\": 0, \"VALBZ\": 0, \"VALE\": 0, \"XLF\": 0}\npending_sell_orders = {\"BOND\": 0, \"VALBZ\": 0, \"VALE\": 0, \"XLF\": 0}\npositions = {\"BOND\": 0, \"VALBZ\": 0, \"VALE\": 0, \"XLF\": 0}\nvale_buy_pending_id = None\nvale_sell_pending_id = None\nvale_sell = 0\nvale_buy = 0\n\nxlf_buy_pending_id = None\nxlf_sell_pending_id = None\nxlf_sell = 0\nxlf_buy = 0\n\ndef main():\n global exchange\n exchange = connect()\n hello()\n hello_from_exchange = read_from_exchange(exchange)\n # A common mistake people make is to call write_to_exchange() > 1\n # time for every read_from_exchange() response.\n # Since many write messages generate marketdata, this will cause an\n # exponential explosion in pending messages. 
Please, don't do that!\n print(\"The exchange replied:\", hello_from_exchange, file=sys.stderr)\n global positions\n positions[\"BOND\"] = hello_from_exchange[\"symbols\"][0][\"position\"]\n positions[\"VALE\"] = hello_from_exchange[\"symbols\"][5][\"position\"]\n positions[\"VALBZ\"] = hello_from_exchange[\"symbols\"][4][\"position\"]\n positions[\"XLF\"] = hello_from_exchange[\"symbols\"][7][\"position\"]\n\n add(\"BOND\", \"BUY\", 999, 100 - positions[\"BOND\"])\n add(\"BOND\", \"SELL\", 1001, 100 + positions[\"BOND\"])\n\n while (True):\n server_msg = read_from_exchange(exchange)\n buy_sell_vale()\n buy_sell_xlf()\n listen_for_fills(server_msg)\n listen_for_book(server_msg)\n listen_for_errors(server_msg)\n \ndef hello():\n write_to_exchange(exchange, {\"type\": \"hello\", \"team\": team_name.upper()})\n\ndef add(symbol, direction, price, size):\n # Update order id to be order placed number\n global orders_placed\n orders_placed += 1\n # Add to pending orders list\n global pending_orders\n pending_orders.append(orders_placed)\n #if symbol == \"VALE\":\n print(\"Order Placed: \" + str(orders_placed) + \" Position: \" + str(positions[symbol])+ \" Size: \" + str(size) + \" Dir: \" + direction + \" Symbol: \" + symbol + \" Price: \" + str(price) + \"\")\n\n # Increment Buy Orders If Necessary\n if (direction == \"BUY\"):\n global pending_buy_orders\n pending_buy_orders[symbol] += size\n elif (direction == \"SELL\"):\n global pending_sell_orders\n pending_sell_orders[symbol] += size\n # Add order to exchange\n write_to_exchange(exchange, {\"type\": \"add\", \"order_id\": orders_placed, \"symbol\": symbol,\n \"dir\":direction, \"price\":price, \"size\": size })\n # \n read_from_exchange(exchange)\n\ndef cancel(order_id):\n write_to_exchange(exchange, {\"type\": \"cancel\", \"order_id\": order_id}) \n\ndef listen_for_fills(server_msg):\n if (server_msg[\"type\"] == \"fill\"):\n # Get info of filled order\n order_num = server_msg[\"order_id\"]\n symbol = server_msg[\"symbol\"]\n size = server_msg[\"size\"]\n direction = server_msg[\"dir\"]\n global positions\n # Update bond order fill and buy/sell as necessary\n if (symbol == \"BOND\"):\n # print(\"Bond Order Partially Filled: \" + str(order_num))\n if (direction == \"BUY\"):\n pending_buy_orders[symbol] -= size\n add(\"BOND\", \"SELL\", 1001, size)\n elif (direction == \"SELL\"):\n pending_sell_orders[symbol] -= size\n add(\"BOND\", \"BUY\", 999, size)\n # Update Vale Order fill and hedge as necessary\n if (symbol == \"VALE\"):\n print(\"Vale Order Filled: \" + str(order_num) + \" \" + direction + \" Size: \" + str(size))\n if (direction == \"BUY\"):\n pending_buy_orders[symbol] -= size\n positions[\"VALE\"] += size\n elif (direction == \"SELL\"):\n positions[\"VALE\"] -= size\n pending_sell_orders[symbol] -= size\n if (symbol == \"XLF\"):\n print(\"XLF Order Filled: \" + str(order_num) + \" \" + direction + \" Size: \" + str(size))\n if (direction == \"BUY\"):\n pending_buy_orders[symbol] -= size\n positions[\"XLF\"] += size\n elif (direction == \"SELL\"):\n positions[\"XLF\"] -= size\n pending_sell_orders[symbol] -= size\n\ndef listen_for_book(server_msg):\n if (server_msg[\"type\"] == \"book\"):\n global vale_sell\n global vale_buy\n global xlf_sell\n global xlf_buy\n if (server_msg[\"symbol\"] == \"VALE\"):\n if len(server_msg[\"sell\"]) > 0:\n vale_sell = server_msg[\"sell\"][0][0]\n if len(server_msg[\"buy\"]) > 0:\n vale_buy = server_msg[\"buy\"][0][0]\n if (server_msg[\"symbol\"] == \"XLF\"):\n if len(server_msg[\"sell\"]) > 0:\n 
xlf_sell = server_msg[\"sell\"][0][0]\n if len(server_msg[\"buy\"]) > 0:\n xlf_buy = server_msg[\"buy\"][0][0]\n\ndef buy_sell_vale():\n if vale_buy > 0 and vale_sell > 0:\n global pending_sell_orders\n global pending_buy_orders\n if ( pending_buy_orders[\"VALE\"] + positions[\"VALE\"] < 10):\n global vale_buy_pending_id\n if vale_buy_pending_id:\n cancel(vale_buy_pending_id)\n pending_buy_orders[\"VALE\"] = 0\n vale_buy_pending_id = None\n print(\"Cancel VALE BUY Order: \" + str(orders_placed))\n time.sleep(1)\n num_stock = 10 - positions[\"VALE\"]\n add(\"VALE\", \"BUY\", vale_buy + 1, 10 - positions[\"VALE\"])\n\n vale_buy_pending_id = orders_placed\n elif (positions[\"VALE\"] - pending_sell_orders[\"VALE\"] > -10):\n global vale_sell_pending_id\n if vale_sell_pending_id:\n print(\"Cancel VALE Sell Order: \" + str(orders_placed))\n cancel(vale_sell_pending_id)\n pending_sell_orders[\"VALE\"] = 0\n vale_sell_pending_id = None\n time.sleep(1)\n num_stock = 10 - positions[\"VALE\"]\n add(\"VALE\", \"SELL\", vale_sell - 1, num_stock)\n vale_sell_pending_id = orders_placed\n\ndef buy_sell_xlf():\n if xlf_buy > 0 and xlf_sell > 0:\n global pending_sell_orders\n global pending_buy_orders\n if ( pending_buy_orders[\"XLF\"] + positions[\"XLF\"] < 100):\n global xlf_buy_pending_id\n if xlf_buy_pending_id:\n cancel(xlf_buy_pending_id)\n pending_buy_orders[\"XLF\"] = 0\n xlf_buy_pending_id = None\n print(\"Cancel XLF Order: \" + str(orders_placed))\n time.sleep(1)\n add(\"XLF\", \"BUY\", xlf_buy + 1, 100 - positions[\"XLF\"])\n xlf_buy_pending_id = orders_placed\n elif (positions[\"XLF\"] - pending_sell_orders[\"XLF\"] > -100):\n global xlf_sell_pending_id\n if xlf_sell_pending_id:\n print(\"Cancel XLF Order: \" + str(orders_placed))\n cancel(xlf_sell_pending_id)\n pending_sell_orders[\"XLF\"] = 0\n xlf_sell_pending_id = None\n time.sleep(1)\n add(\"XLF\", \"SELL\", xlf_sell - 1, 100 + positions[\"XLF\"])\n xlf_sell_pending_id = orders_placed\n\ndef listen_for_errors(server_msg):\n if (server_msg[\"type\"] == \"reject\"):\n print(\"ERROR: ORDER FAILED, id: \" + str(server_msg[\"order_id\"]) + \" \" + server_msg[\"error\"])\n if (server_msg[\"type\"] == \"error\"):\n print(\"ERROR: ORDER FAILED, id: \" + str(id) + \" \" + server_msg[\"error\"])\n if (server_msg[\"type\"] == \"ack\"):\n print(\"Order Completed: \" + str(server_msg[\"order_id\"]))\n if (server_msg[\"type\"] == \"out\"):\n print(\"Order Successfully Canceled: \" + str(server_msg[\"order_id\"]))\n\n #add(\"BOND\", \"BUY\", 999, 100 - positions[\"BOND\"])\n #add(\"BOND\", \"SELL\", 1001, 100 + positions[\"BOND\"])\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
5,
8,
9,
10,
16
]
}
|
[
5,
8,
9,
10,
16
] |
<|reserved_special_token_0|>
class If_Statement(Instruction):
def __init__(self, row, column, expBool, elseif_list, else_, stmts) ->None:
super().__init__(row, column)
self.expBool = expBool
self.elseif_list = elseif_list
self.else_ = else_
self.stmts = stmts
<|reserved_special_token_0|>
def p_if_sum(self):
if grammar.if_stmt != 0:
grammar.back_fill.new_lists()
grammar.if_stmt += 1
def p_if_rest(self):
grammar.if_stmt -= 1
def p_iev(self):
return grammar.back_fill.take_out_true_list(self.row)
def p_fev(self):
return grammar.back_fill.take_out_false_list(self.row)
<|reserved_special_token_0|>
def p_write_next_etiq(self):
val = '\tlabel .etiqS' + str(grammar.next_etiq) + '\n'
grammar.optimizer_.addLabel(str('etiqS' + str(grammar.next_etiq)),
self.row)
grammar.next_etiq += 1
return val
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class If_Statement(Instruction):
def __init__(self, row, column, expBool, elseif_list, else_, stmts) ->None:
super().__init__(row, column)
self.expBool = expBool
self.elseif_list = elseif_list
self.else_ = else_
self.stmts = stmts
def execute(self, environment):
self.p_if_sum()
boolCode = self.expBool.execute(environment)
cod3d = boolCode.value
cod3d += '\tif ' + str(boolCode.temp) + ': goto .etiv' + str(
grammar.current_etiq + 1) + '\n'
grammar.optimizer_.addIF(str(boolCode.temp), str('etiv' + str(
grammar.current_etiq + 1)), self.row)
cod3d += '\tgoto .etif' + str(grammar.current_etiq + 2) + '\n'
grammar.optimizer_.addGoto(str('etif' + str(grammar.current_etiq +
2)), self.row)
grammar.back_fill.insert_true(grammar.current_etiq + 1)
grammar.back_fill.insert_false(grammar.current_etiq + 2)
grammar.current_etiq += 2
cod3d += self.p_iev()
codeElseif = ''
for stmt in self.stmts:
cod3d += stmt.execute(environment).value
self.index = grammar.optimizer_.addGoto(str('etiqS' + str(grammar.
next_etiq)), self.row) - 1
if len(self.elseif_list) > 0:
for elseif in self.elseif_list:
codeElseif += elseif.execute(environment).value
cod3d += self.p_fef()
cod3d += codeElseif
if self.else_ != None:
cod3d += self.else_.execute(environment).value
else:
cod3d += self.p_write_next_etiq()
cod3d += self.p_fev()
self.p_if_rest()
return code.C3D(cod3d, 'if', self.row, self.column)
def p_if_sum(self):
if grammar.if_stmt != 0:
grammar.back_fill.new_lists()
grammar.if_stmt += 1
def p_if_rest(self):
grammar.if_stmt -= 1
def p_iev(self):
return grammar.back_fill.take_out_true_list(self.row)
def p_fev(self):
return grammar.back_fill.take_out_false_list(self.row)
def p_fef(self):
val = '\tgoto .etiqS' + str(grammar.next_etiq) + '\n'
grammar.optimizer_.addGoto_IF(str('etiqS' + str(grammar.next_etiq)),
self.row, self.index)
val += grammar.back_fill.take_out_true_list(self.row)
return val
def p_write_next_etiq(self):
val = '\tlabel .etiqS' + str(grammar.next_etiq) + '\n'
grammar.optimizer_.addLabel(str('etiqS' + str(grammar.next_etiq)),
self.row)
grammar.next_etiq += 1
return val
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class If_Statement(Instruction):
def __init__(self, row, column, expBool, elseif_list, else_, stmts) ->None:
super().__init__(row, column)
self.expBool = expBool
self.elseif_list = elseif_list
self.else_ = else_
self.stmts = stmts
def execute(self, environment):
self.p_if_sum()
boolCode = self.expBool.execute(environment)
cod3d = boolCode.value
cod3d += '\tif ' + str(boolCode.temp) + ': goto .etiv' + str(
grammar.current_etiq + 1) + '\n'
grammar.optimizer_.addIF(str(boolCode.temp), str('etiv' + str(
grammar.current_etiq + 1)), self.row)
cod3d += '\tgoto .etif' + str(grammar.current_etiq + 2) + '\n'
grammar.optimizer_.addGoto(str('etif' + str(grammar.current_etiq +
2)), self.row)
grammar.back_fill.insert_true(grammar.current_etiq + 1)
grammar.back_fill.insert_false(grammar.current_etiq + 2)
grammar.current_etiq += 2
cod3d += self.p_iev()
codeElseif = ''
for stmt in self.stmts:
cod3d += stmt.execute(environment).value
self.index = grammar.optimizer_.addGoto(str('etiqS' + str(grammar.
next_etiq)), self.row) - 1
if len(self.elseif_list) > 0:
for elseif in self.elseif_list:
codeElseif += elseif.execute(environment).value
cod3d += self.p_fef()
cod3d += codeElseif
if self.else_ != None:
cod3d += self.else_.execute(environment).value
else:
cod3d += self.p_write_next_etiq()
cod3d += self.p_fev()
self.p_if_rest()
return code.C3D(cod3d, 'if', self.row, self.column)
def p_if_sum(self):
if grammar.if_stmt != 0:
grammar.back_fill.new_lists()
grammar.if_stmt += 1
def p_if_rest(self):
grammar.if_stmt -= 1
def p_iev(self):
return grammar.back_fill.take_out_true_list(self.row)
def p_fev(self):
return grammar.back_fill.take_out_false_list(self.row)
def p_fef(self):
val = '\tgoto .etiqS' + str(grammar.next_etiq) + '\n'
grammar.optimizer_.addGoto_IF(str('etiqS' + str(grammar.next_etiq)),
self.row, self.index)
val += grammar.back_fill.take_out_true_list(self.row)
return val
def p_write_next_etiq(self):
val = '\tlabel .etiqS' + str(grammar.next_etiq) + '\n'
grammar.optimizer_.addLabel(str('etiqS' + str(grammar.next_etiq)),
self.row)
grammar.next_etiq += 1
return val
def dot(self):
new = Nodo('IF')
new.addNode(self.expBool.dot())
then = Nodo('THEN')
new.addNode(then)
for s in self.stmts:
then.addNode(s.dot())
for eif in self.elseif_list:
new.addNode(eif.dot())
if self.else_:
new.addNode(self.else_.dot())
return new
<|reserved_special_token_1|>
from analizer_pl.abstract.instruction import Instruction
from analizer_pl import grammar
from analizer_pl.statement.expressions import code
from analizer_pl.reports.Nodo import Nodo
class If_Statement(Instruction):
def __init__(self, row, column, expBool, elseif_list, else_, stmts) ->None:
super().__init__(row, column)
self.expBool = expBool
self.elseif_list = elseif_list
self.else_ = else_
self.stmts = stmts
def execute(self, environment):
self.p_if_sum()
boolCode = self.expBool.execute(environment)
cod3d = boolCode.value
cod3d += '\tif ' + str(boolCode.temp) + ': goto .etiv' + str(
grammar.current_etiq + 1) + '\n'
grammar.optimizer_.addIF(str(boolCode.temp), str('etiv' + str(
grammar.current_etiq + 1)), self.row)
cod3d += '\tgoto .etif' + str(grammar.current_etiq + 2) + '\n'
grammar.optimizer_.addGoto(str('etif' + str(grammar.current_etiq +
2)), self.row)
grammar.back_fill.insert_true(grammar.current_etiq + 1)
grammar.back_fill.insert_false(grammar.current_etiq + 2)
grammar.current_etiq += 2
cod3d += self.p_iev()
codeElseif = ''
for stmt in self.stmts:
cod3d += stmt.execute(environment).value
self.index = grammar.optimizer_.addGoto(str('etiqS' + str(grammar.
next_etiq)), self.row) - 1
if len(self.elseif_list) > 0:
for elseif in self.elseif_list:
codeElseif += elseif.execute(environment).value
cod3d += self.p_fef()
cod3d += codeElseif
if self.else_ != None:
cod3d += self.else_.execute(environment).value
else:
cod3d += self.p_write_next_etiq()
cod3d += self.p_fev()
self.p_if_rest()
return code.C3D(cod3d, 'if', self.row, self.column)
def p_if_sum(self):
if grammar.if_stmt != 0:
grammar.back_fill.new_lists()
grammar.if_stmt += 1
def p_if_rest(self):
grammar.if_stmt -= 1
def p_iev(self):
return grammar.back_fill.take_out_true_list(self.row)
def p_fev(self):
return grammar.back_fill.take_out_false_list(self.row)
def p_fef(self):
val = '\tgoto .etiqS' + str(grammar.next_etiq) + '\n'
grammar.optimizer_.addGoto_IF(str('etiqS' + str(grammar.next_etiq)),
self.row, self.index)
val += grammar.back_fill.take_out_true_list(self.row)
return val
def p_write_next_etiq(self):
val = '\tlabel .etiqS' + str(grammar.next_etiq) + '\n'
grammar.optimizer_.addLabel(str('etiqS' + str(grammar.next_etiq)),
self.row)
grammar.next_etiq += 1
return val
def dot(self):
new = Nodo('IF')
new.addNode(self.expBool.dot())
then = Nodo('THEN')
new.addNode(then)
for s in self.stmts:
then.addNode(s.dot())
for eif in self.elseif_list:
new.addNode(eif.dot())
if self.else_:
new.addNode(self.else_.dot())
return new
<|reserved_special_token_1|>
from analizer_pl.abstract.instruction import Instruction
from analizer_pl import grammar
from analizer_pl.statement.expressions import code
from analizer_pl.reports.Nodo import Nodo
class If_Statement(Instruction):
def __init__(self, row, column,expBool, elseif_list,else_,stmts ) -> None:
super().__init__(row, column)
self.expBool = expBool
self.elseif_list=elseif_list
self.else_ = else_
self.stmts = stmts
def execute(self, environment):
self.p_if_sum()
boolCode = self.expBool.execute(environment)
cod3d = boolCode.value
cod3d += "\tif "+str(boolCode.temp)+": goto .etiv"+str(grammar.current_etiq+1)+"\n"
grammar.optimizer_.addIF(str(boolCode.temp),str("etiv"+str(grammar.current_etiq+1)),self.row)
cod3d+="\tgoto .etif"+ str(grammar.current_etiq+2)+"\n"
grammar.optimizer_.addGoto(str("etif"+str(grammar.current_etiq+2)),self.row)
grammar.back_fill.insert_true(grammar.current_etiq+1)
grammar.back_fill.insert_false(grammar.current_etiq+2)
grammar.current_etiq+=2
cod3d += self.p_iev()
codeElseif=""
for stmt in self.stmts:
cod3d +=stmt.execute(environment).value
self.index = grammar.optimizer_.addGoto(str("etiqS"+str(grammar.next_etiq)),self.row)-1
if len(self.elseif_list) > 0:
for elseif in self.elseif_list:
codeElseif += elseif.execute(environment).value
cod3d+=self.p_fef()
cod3d+=codeElseif
if self.else_ != None:
cod3d+=self.else_.execute(environment).value
else:
cod3d+=self.p_write_next_etiq()
cod3d+=self.p_fev()
self.p_if_rest()
return code.C3D(cod3d,"if",self.row,self.column)
def p_if_sum(self):
if grammar.if_stmt !=0:
grammar.back_fill.new_lists()
grammar.if_stmt+=1
def p_if_rest(self):
grammar.if_stmt -=1
def p_iev(self):
return grammar.back_fill.take_out_true_list(self.row)
def p_fev(self):
return grammar.back_fill.take_out_false_list(self.row)
def p_fef(self):
val ="\tgoto .etiqS"+ str(grammar.next_etiq)+"\n"
grammar.optimizer_.addGoto_IF(str("etiqS"+str(grammar.next_etiq)),self.row,self.index)
val +=grammar.back_fill.take_out_true_list(self.row)
return val
def p_write_next_etiq(self):
val="\tlabel .etiqS"+str(grammar.next_etiq)+"\n"
grammar.optimizer_.addLabel(str("etiqS"+str(grammar.next_etiq)),self.row)
grammar.next_etiq+=1
return val
def dot(self):
new = Nodo("IF")
new.addNode(self.expBool.dot())
then = Nodo("THEN")
new.addNode(then)
for s in self.stmts:
then.addNode(s.dot())
for eif in self.elseif_list:
new.addNode(eif.dot())
if self.else_:
new.addNode(self.else_.dot())
return new
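# --- Editorial sketch (added; not part of the original module) ---
# For a bare "IF cond THEN stmts END IF" with no ELSIF/ELSE list, execute()
# above emits three-address code shaped roughly like this (actual label
# numbers depend on grammar.current_etiq / grammar.next_etiq at runtime):
#
#     ... code leaving the condition in t0 ...
#     if t0: goto .etiv1        # true exit, queued in back_fill's true list
#     goto .etif2               # false exit, queued in back_fill's false list
#     label .etiv1              # flushed by p_iev()
#     ... code for the THEN statements ...
#     label .etiqS3             # written by p_write_next_etiq() (no ELSE)
#     label .etif2              # flushed by p_fev()
#
# p_if_sum()/p_if_rest() push and pop back_fill label lists so nested IFs
# patch their own jumps without touching the enclosing statement's lists.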
|
flexible
|
{
"blob_id": "bbbdb30ceef920e600c9f46fb968732b077be2d8",
"index": 4231,
"step-1": "<mask token>\n\n\nclass If_Statement(Instruction):\n\n def __init__(self, row, column, expBool, elseif_list, else_, stmts) ->None:\n super().__init__(row, column)\n self.expBool = expBool\n self.elseif_list = elseif_list\n self.else_ = else_\n self.stmts = stmts\n <mask token>\n\n def p_if_sum(self):\n if grammar.if_stmt != 0:\n grammar.back_fill.new_lists()\n grammar.if_stmt += 1\n\n def p_if_rest(self):\n grammar.if_stmt -= 1\n\n def p_iev(self):\n return grammar.back_fill.take_out_true_list(self.row)\n\n def p_fev(self):\n return grammar.back_fill.take_out_false_list(self.row)\n <mask token>\n\n def p_write_next_etiq(self):\n val = '\\tlabel .etiqS' + str(grammar.next_etiq) + '\\n'\n grammar.optimizer_.addLabel(str('etiqS' + str(grammar.next_etiq)),\n self.row)\n grammar.next_etiq += 1\n return val\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass If_Statement(Instruction):\n\n def __init__(self, row, column, expBool, elseif_list, else_, stmts) ->None:\n super().__init__(row, column)\n self.expBool = expBool\n self.elseif_list = elseif_list\n self.else_ = else_\n self.stmts = stmts\n\n def execute(self, environment):\n self.p_if_sum()\n boolCode = self.expBool.execute(environment)\n cod3d = boolCode.value\n cod3d += '\\tif ' + str(boolCode.temp) + ': goto .etiv' + str(\n grammar.current_etiq + 1) + '\\n'\n grammar.optimizer_.addIF(str(boolCode.temp), str('etiv' + str(\n grammar.current_etiq + 1)), self.row)\n cod3d += '\\tgoto .etif' + str(grammar.current_etiq + 2) + '\\n'\n grammar.optimizer_.addGoto(str('etif' + str(grammar.current_etiq + \n 2)), self.row)\n grammar.back_fill.insert_true(grammar.current_etiq + 1)\n grammar.back_fill.insert_false(grammar.current_etiq + 2)\n grammar.current_etiq += 2\n cod3d += self.p_iev()\n codeElseif = ''\n for stmt in self.stmts:\n cod3d += stmt.execute(environment).value\n self.index = grammar.optimizer_.addGoto(str('etiqS' + str(grammar.\n next_etiq)), self.row) - 1\n if len(self.elseif_list) > 0:\n for elseif in self.elseif_list:\n codeElseif += elseif.execute(environment).value\n cod3d += self.p_fef()\n cod3d += codeElseif\n if self.else_ != None:\n cod3d += self.else_.execute(environment).value\n else:\n cod3d += self.p_write_next_etiq()\n cod3d += self.p_fev()\n self.p_if_rest()\n return code.C3D(cod3d, 'if', self.row, self.column)\n\n def p_if_sum(self):\n if grammar.if_stmt != 0:\n grammar.back_fill.new_lists()\n grammar.if_stmt += 1\n\n def p_if_rest(self):\n grammar.if_stmt -= 1\n\n def p_iev(self):\n return grammar.back_fill.take_out_true_list(self.row)\n\n def p_fev(self):\n return grammar.back_fill.take_out_false_list(self.row)\n\n def p_fef(self):\n val = '\\tgoto .etiqS' + str(grammar.next_etiq) + '\\n'\n grammar.optimizer_.addGoto_IF(str('etiqS' + str(grammar.next_etiq)),\n self.row, self.index)\n val += grammar.back_fill.take_out_true_list(self.row)\n return val\n\n def p_write_next_etiq(self):\n val = '\\tlabel .etiqS' + str(grammar.next_etiq) + '\\n'\n grammar.optimizer_.addLabel(str('etiqS' + str(grammar.next_etiq)),\n self.row)\n grammar.next_etiq += 1\n return val\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass If_Statement(Instruction):\n\n def __init__(self, row, column, expBool, elseif_list, else_, stmts) ->None:\n super().__init__(row, column)\n self.expBool = expBool\n self.elseif_list = elseif_list\n self.else_ = else_\n self.stmts = stmts\n\n def execute(self, environment):\n self.p_if_sum()\n boolCode = self.expBool.execute(environment)\n cod3d = boolCode.value\n cod3d += '\\tif ' + str(boolCode.temp) + ': goto .etiv' + str(\n grammar.current_etiq + 1) + '\\n'\n grammar.optimizer_.addIF(str(boolCode.temp), str('etiv' + str(\n grammar.current_etiq + 1)), self.row)\n cod3d += '\\tgoto .etif' + str(grammar.current_etiq + 2) + '\\n'\n grammar.optimizer_.addGoto(str('etif' + str(grammar.current_etiq + \n 2)), self.row)\n grammar.back_fill.insert_true(grammar.current_etiq + 1)\n grammar.back_fill.insert_false(grammar.current_etiq + 2)\n grammar.current_etiq += 2\n cod3d += self.p_iev()\n codeElseif = ''\n for stmt in self.stmts:\n cod3d += stmt.execute(environment).value\n self.index = grammar.optimizer_.addGoto(str('etiqS' + str(grammar.\n next_etiq)), self.row) - 1\n if len(self.elseif_list) > 0:\n for elseif in self.elseif_list:\n codeElseif += elseif.execute(environment).value\n cod3d += self.p_fef()\n cod3d += codeElseif\n if self.else_ != None:\n cod3d += self.else_.execute(environment).value\n else:\n cod3d += self.p_write_next_etiq()\n cod3d += self.p_fev()\n self.p_if_rest()\n return code.C3D(cod3d, 'if', self.row, self.column)\n\n def p_if_sum(self):\n if grammar.if_stmt != 0:\n grammar.back_fill.new_lists()\n grammar.if_stmt += 1\n\n def p_if_rest(self):\n grammar.if_stmt -= 1\n\n def p_iev(self):\n return grammar.back_fill.take_out_true_list(self.row)\n\n def p_fev(self):\n return grammar.back_fill.take_out_false_list(self.row)\n\n def p_fef(self):\n val = '\\tgoto .etiqS' + str(grammar.next_etiq) + '\\n'\n grammar.optimizer_.addGoto_IF(str('etiqS' + str(grammar.next_etiq)),\n self.row, self.index)\n val += grammar.back_fill.take_out_true_list(self.row)\n return val\n\n def p_write_next_etiq(self):\n val = '\\tlabel .etiqS' + str(grammar.next_etiq) + '\\n'\n grammar.optimizer_.addLabel(str('etiqS' + str(grammar.next_etiq)),\n self.row)\n grammar.next_etiq += 1\n return val\n\n def dot(self):\n new = Nodo('IF')\n new.addNode(self.expBool.dot())\n then = Nodo('THEN')\n new.addNode(then)\n for s in self.stmts:\n then.addNode(s.dot())\n for eif in self.elseif_list:\n new.addNode(eif.dot())\n if self.else_:\n new.addNode(self.else_.dot())\n return new\n",
"step-4": "from analizer_pl.abstract.instruction import Instruction\nfrom analizer_pl import grammar\nfrom analizer_pl.statement.expressions import code\nfrom analizer_pl.reports.Nodo import Nodo\n\n\nclass If_Statement(Instruction):\n\n def __init__(self, row, column, expBool, elseif_list, else_, stmts) ->None:\n super().__init__(row, column)\n self.expBool = expBool\n self.elseif_list = elseif_list\n self.else_ = else_\n self.stmts = stmts\n\n def execute(self, environment):\n self.p_if_sum()\n boolCode = self.expBool.execute(environment)\n cod3d = boolCode.value\n cod3d += '\\tif ' + str(boolCode.temp) + ': goto .etiv' + str(\n grammar.current_etiq + 1) + '\\n'\n grammar.optimizer_.addIF(str(boolCode.temp), str('etiv' + str(\n grammar.current_etiq + 1)), self.row)\n cod3d += '\\tgoto .etif' + str(grammar.current_etiq + 2) + '\\n'\n grammar.optimizer_.addGoto(str('etif' + str(grammar.current_etiq + \n 2)), self.row)\n grammar.back_fill.insert_true(grammar.current_etiq + 1)\n grammar.back_fill.insert_false(grammar.current_etiq + 2)\n grammar.current_etiq += 2\n cod3d += self.p_iev()\n codeElseif = ''\n for stmt in self.stmts:\n cod3d += stmt.execute(environment).value\n self.index = grammar.optimizer_.addGoto(str('etiqS' + str(grammar.\n next_etiq)), self.row) - 1\n if len(self.elseif_list) > 0:\n for elseif in self.elseif_list:\n codeElseif += elseif.execute(environment).value\n cod3d += self.p_fef()\n cod3d += codeElseif\n if self.else_ != None:\n cod3d += self.else_.execute(environment).value\n else:\n cod3d += self.p_write_next_etiq()\n cod3d += self.p_fev()\n self.p_if_rest()\n return code.C3D(cod3d, 'if', self.row, self.column)\n\n def p_if_sum(self):\n if grammar.if_stmt != 0:\n grammar.back_fill.new_lists()\n grammar.if_stmt += 1\n\n def p_if_rest(self):\n grammar.if_stmt -= 1\n\n def p_iev(self):\n return grammar.back_fill.take_out_true_list(self.row)\n\n def p_fev(self):\n return grammar.back_fill.take_out_false_list(self.row)\n\n def p_fef(self):\n val = '\\tgoto .etiqS' + str(grammar.next_etiq) + '\\n'\n grammar.optimizer_.addGoto_IF(str('etiqS' + str(grammar.next_etiq)),\n self.row, self.index)\n val += grammar.back_fill.take_out_true_list(self.row)\n return val\n\n def p_write_next_etiq(self):\n val = '\\tlabel .etiqS' + str(grammar.next_etiq) + '\\n'\n grammar.optimizer_.addLabel(str('etiqS' + str(grammar.next_etiq)),\n self.row)\n grammar.next_etiq += 1\n return val\n\n def dot(self):\n new = Nodo('IF')\n new.addNode(self.expBool.dot())\n then = Nodo('THEN')\n new.addNode(then)\n for s in self.stmts:\n then.addNode(s.dot())\n for eif in self.elseif_list:\n new.addNode(eif.dot())\n if self.else_:\n new.addNode(self.else_.dot())\n return new\n",
"step-5": "from analizer_pl.abstract.instruction import Instruction\nfrom analizer_pl import grammar\nfrom analizer_pl.statement.expressions import code\nfrom analizer_pl.reports.Nodo import Nodo\n\n\nclass If_Statement(Instruction):\n \n def __init__(self, row, column,expBool, elseif_list,else_,stmts ) -> None:\n super().__init__(row, column)\n self.expBool = expBool\n self.elseif_list=elseif_list\n self.else_ = else_\n self.stmts = stmts\n\n def execute(self, environment):\n self.p_if_sum()\n boolCode = self.expBool.execute(environment)\n cod3d = boolCode.value\n cod3d += \"\\tif \"+str(boolCode.temp)+\": goto .etiv\"+str(grammar.current_etiq+1)+\"\\n\"\n grammar.optimizer_.addIF(str(boolCode.temp),str(\"etiv\"+str(grammar.current_etiq+1)),self.row)\n cod3d+=\"\\tgoto .etif\"+ str(grammar.current_etiq+2)+\"\\n\"\n grammar.optimizer_.addGoto(str(\"etif\"+str(grammar.current_etiq+2)),self.row)\n grammar.back_fill.insert_true(grammar.current_etiq+1)\n grammar.back_fill.insert_false(grammar.current_etiq+2)\n grammar.current_etiq+=2\n cod3d += self.p_iev()\n codeElseif=\"\"\n for stmt in self.stmts:\n cod3d +=stmt.execute(environment).value\n self.index = grammar.optimizer_.addGoto(str(\"etiqS\"+str(grammar.next_etiq)),self.row)-1\n if len(self.elseif_list) > 0:\n for elseif in self.elseif_list:\n codeElseif += elseif.execute(environment).value\n cod3d+=self.p_fef() \n cod3d+=codeElseif\n if self.else_ != None:\n cod3d+=self.else_.execute(environment).value\n else:\n cod3d+=self.p_write_next_etiq()\n cod3d+=self.p_fev()\n self.p_if_rest()\n return code.C3D(cod3d,\"if\",self.row,self.column)\n\n def p_if_sum(self):\n if grammar.if_stmt !=0:\n grammar.back_fill.new_lists()\n grammar.if_stmt+=1\n\n def p_if_rest(self):\n grammar.if_stmt -=1\n\n def p_iev(self):\n return grammar.back_fill.take_out_true_list(self.row)\n\n def p_fev(self):\n return grammar.back_fill.take_out_false_list(self.row)\n\n def p_fef(self):\n val =\"\\tgoto .etiqS\"+ str(grammar.next_etiq)+\"\\n\"\n grammar.optimizer_.addGoto_IF(str(\"etiqS\"+str(grammar.next_etiq)),self.row,self.index) \n val +=grammar.back_fill.take_out_true_list(self.row)\n return val\n\n def p_write_next_etiq(self):\n val=\"\\tlabel .etiqS\"+str(grammar.next_etiq)+\"\\n\"\n grammar.optimizer_.addLabel(str(\"etiqS\"+str(grammar.next_etiq)),self.row)\n grammar.next_etiq+=1\n return val\n\n def dot(self):\n new = Nodo(\"IF\")\n new.addNode(self.expBool.dot())\n then = Nodo(\"THEN\")\n new.addNode(then)\n for s in self.stmts:\n then.addNode(s.dot())\n for eif in self.elseif_list:\n new.addNode(eif.dot())\n\n if self.else_:\n new.addNode(self.else_.dot())\n return new",
"step-ids": [
7,
9,
10,
11,
12
]
}
|
[
7,
9,
10,
11,
12
] |
<|reserved_special_token_0|>
def get_position_from_angle(razon, data, start, end):
dni_df, altitude_angles, azimuth_angles = data
cos_correct_df = razon.get_cos_factors(altitude_angles, azimuth_angles)
dni_df = razon.cos_correct(dni_df, cos_correct_df)
angles = pd.DataFrame()
angles['Theta'] = dni_df['Theta_']
angles['Phi'] = dni_df['Phi_']
def match_angles_wrapper(angles):
mapping = read_and_clean_angle_position()
return match_angles(mapping, angles[0], angles[1])
positions = angles.apply(match_angles_wrapper, axis=1)
positions = [(x[0], x[1]) for x in positions]
positions = zip(*positions)
positions = pd.DataFrame(positions).transpose()
positions['Datetime Local'] = dni_df['Datetime Local']
return positions
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_position_from_angle(razon, data, start, end):
dni_df, altitude_angles, azimuth_angles = data
cos_correct_df = razon.get_cos_factors(altitude_angles, azimuth_angles)
dni_df = razon.cos_correct(dni_df, cos_correct_df)
angles = pd.DataFrame()
angles['Theta'] = dni_df['Theta_']
angles['Phi'] = dni_df['Phi_']
def match_angles_wrapper(angles):
mapping = read_and_clean_angle_position()
return match_angles(mapping, angles[0], angles[1])
positions = angles.apply(match_angles_wrapper, axis=1)
positions = [(x[0], x[1]) for x in positions]
positions = zip(*positions)
positions = pd.DataFrame(positions).transpose()
positions['Datetime Local'] = dni_df['Datetime Local']
return positions
def main():
razon = RaZON(lat=37.595932, lon=-122.368848, panel_tilt=20, razonIP=
'192.168.15.150')
now = dt.datetime(2018, 4, 9)
start = dt.datetime(year=now.year, month=now.month, day=now.day, hour=
13, minute=45, second=0)
end = dt.datetime(year=now.year, month=now.month, day=now.day, hour=16,
minute=0, second=0)
data = razon.request_interval(now, start, end)
positions = get_position_from_angle(razon, data, start, end)
print(positions)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_position_from_angle(razon, data, start, end):
dni_df, altitude_angles, azimuth_angles = data
cos_correct_df = razon.get_cos_factors(altitude_angles, azimuth_angles)
dni_df = razon.cos_correct(dni_df, cos_correct_df)
angles = pd.DataFrame()
angles['Theta'] = dni_df['Theta_']
angles['Phi'] = dni_df['Phi_']
def match_angles_wrapper(angles):
mapping = read_and_clean_angle_position()
return match_angles(mapping, angles[0], angles[1])
positions = angles.apply(match_angles_wrapper, axis=1)
positions = [(x[0], x[1]) for x in positions]
positions = zip(*positions)
positions = pd.DataFrame(positions).transpose()
positions['Datetime Local'] = dni_df['Datetime Local']
return positions
def main():
razon = RaZON(lat=37.595932, lon=-122.368848, panel_tilt=20, razonIP=
'192.168.15.150')
now = dt.datetime(2018, 4, 9)
start = dt.datetime(year=now.year, month=now.month, day=now.day, hour=
13, minute=45, second=0)
end = dt.datetime(year=now.year, month=now.month, day=now.day, hour=16,
minute=0, second=0)
data = razon.request_interval(now, start, end)
positions = get_position_from_angle(razon, data, start, end)
print(positions)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
from cos_correct_v2 import *
from angle_to_position import *
import pandas as pd
import datetime as dt
def get_position_from_angle(razon, data, start, end):
dni_df, altitude_angles, azimuth_angles = data
cos_correct_df = razon.get_cos_factors(altitude_angles, azimuth_angles)
dni_df = razon.cos_correct(dni_df, cos_correct_df)
angles = pd.DataFrame()
angles['Theta'] = dni_df['Theta_']
angles['Phi'] = dni_df['Phi_']
def match_angles_wrapper(angles):
mapping = read_and_clean_angle_position()
return match_angles(mapping, angles[0], angles[1])
positions = angles.apply(match_angles_wrapper, axis=1)
positions = [(x[0], x[1]) for x in positions]
positions = zip(*positions)
positions = pd.DataFrame(positions).transpose()
positions['Datetime Local'] = dni_df['Datetime Local']
return positions
def main():
razon = RaZON(lat=37.595932, lon=-122.368848, panel_tilt=20, razonIP=
'192.168.15.150')
now = dt.datetime(2018, 4, 9)
start = dt.datetime(year=now.year, month=now.month, day=now.day, hour=
13, minute=45, second=0)
end = dt.datetime(year=now.year, month=now.month, day=now.day, hour=16,
minute=0, second=0)
data = razon.request_interval(now, start, end)
positions = get_position_from_angle(razon, data, start, end)
print(positions)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
#!/usr/bin/env python
from cos_correct_v2 import *
from angle_to_position import *
import pandas as pd
import datetime as dt
def get_position_from_angle(razon, data, start, end):
# Obtain cos factors and corrected data
dni_df, altitude_angles, azimuth_angles = data
cos_correct_df = razon.get_cos_factors(altitude_angles, azimuth_angles)
dni_df = razon.cos_correct(dni_df, cos_correct_df)
# print(dni_df)
angles = pd.DataFrame()
angles['Theta'] = dni_df['Theta_']
angles['Phi'] = dni_df['Phi_']
# angles['Time'] = dni_df['Time (hh:mm:ss)']
# angles['Datetime Local'] = dni_df['Datetime Local']
# print(angles)
def match_angles_wrapper(angles):
mapping = read_and_clean_angle_position()
return match_angles(mapping, angles[0], angles[1])
positions = angles.apply(match_angles_wrapper, axis=1)
# print(positions)
positions = [(x[0], x[1]) for x in positions]
positions = zip(*positions)
# print(positions)
positions = pd.DataFrame(positions).transpose()
positions['Datetime Local'] = dni_df['Datetime Local']
# print(positions)
return positions
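# Editorial note (added): match_angles_wrapper returns one (x, y) pair per
# row, so `positions` above goes Series of (x, y) -> list of tuples ->
# zip(*...) -> (all_x, all_y) -> 2xN DataFrame -> transpose() -> N rows whose
# integer columns 0 and 1 hold the matched x and y positions.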
def main():
# Communicate to RaZON through local webpage
razon = RaZON(lat=37.595932, lon=-122.368848, panel_tilt=20, razonIP="192.168.15.150")
# Use RaZON.get_local_datetime
# now = razon.get_local_datetime() - dt.timedelta(days=1)
now = dt.datetime(2018, 4, 9)
    # Samples data between two datetime objects (date is supplied by now)
start = dt.datetime(year=now.year,
month=now.month,
day=now.day,
hour=13,
minute=45,
second=0)
end = dt.datetime(year=now.year,
month=now.month,
day=now.day,
hour=16,
minute=0,
second=0)
data = razon.request_interval(now, start, end)
positions = get_position_from_angle(razon, data, start, end)
print(positions)
# # Loop through appropriate angles:
# for angle in angles:
# mapping = read_and_clean_angle_position()
# x, y = match_angles(mapping, theta, phi)
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "13a4fb5ce9ab0a3ef9ce503698615eae4157a637",
"index": 7962,
"step-1": "<mask token>\n\n\ndef get_position_from_angle(razon, data, start, end):\n dni_df, altitude_angles, azimuth_angles = data\n cos_correct_df = razon.get_cos_factors(altitude_angles, azimuth_angles)\n dni_df = razon.cos_correct(dni_df, cos_correct_df)\n angles = pd.DataFrame()\n angles['Theta'] = dni_df['Theta_']\n angles['Phi'] = dni_df['Phi_']\n\n def match_angles_wrapper(angles):\n mapping = read_and_clean_angle_position()\n return match_angles(mapping, angles[0], angles[1])\n positions = angles.apply(match_angles_wrapper, axis=1)\n positions = [(x[0], x[1]) for x in positions]\n positions = zip(*positions)\n positions = pd.DataFrame(positions).transpose()\n positions['Datetime Local'] = dni_df['Datetime Local']\n return positions\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_position_from_angle(razon, data, start, end):\n dni_df, altitude_angles, azimuth_angles = data\n cos_correct_df = razon.get_cos_factors(altitude_angles, azimuth_angles)\n dni_df = razon.cos_correct(dni_df, cos_correct_df)\n angles = pd.DataFrame()\n angles['Theta'] = dni_df['Theta_']\n angles['Phi'] = dni_df['Phi_']\n\n def match_angles_wrapper(angles):\n mapping = read_and_clean_angle_position()\n return match_angles(mapping, angles[0], angles[1])\n positions = angles.apply(match_angles_wrapper, axis=1)\n positions = [(x[0], x[1]) for x in positions]\n positions = zip(*positions)\n positions = pd.DataFrame(positions).transpose()\n positions['Datetime Local'] = dni_df['Datetime Local']\n return positions\n\n\ndef main():\n razon = RaZON(lat=37.595932, lon=-122.368848, panel_tilt=20, razonIP=\n '192.168.15.150')\n now = dt.datetime(2018, 4, 9)\n start = dt.datetime(year=now.year, month=now.month, day=now.day, hour=\n 13, minute=45, second=0)\n end = dt.datetime(year=now.year, month=now.month, day=now.day, hour=16,\n minute=0, second=0)\n data = razon.request_interval(now, start, end)\n positions = get_position_from_angle(razon, data, start, end)\n print(positions)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_position_from_angle(razon, data, start, end):\n dni_df, altitude_angles, azimuth_angles = data\n cos_correct_df = razon.get_cos_factors(altitude_angles, azimuth_angles)\n dni_df = razon.cos_correct(dni_df, cos_correct_df)\n angles = pd.DataFrame()\n angles['Theta'] = dni_df['Theta_']\n angles['Phi'] = dni_df['Phi_']\n\n def match_angles_wrapper(angles):\n mapping = read_and_clean_angle_position()\n return match_angles(mapping, angles[0], angles[1])\n positions = angles.apply(match_angles_wrapper, axis=1)\n positions = [(x[0], x[1]) for x in positions]\n positions = zip(*positions)\n positions = pd.DataFrame(positions).transpose()\n positions['Datetime Local'] = dni_df['Datetime Local']\n return positions\n\n\ndef main():\n razon = RaZON(lat=37.595932, lon=-122.368848, panel_tilt=20, razonIP=\n '192.168.15.150')\n now = dt.datetime(2018, 4, 9)\n start = dt.datetime(year=now.year, month=now.month, day=now.day, hour=\n 13, minute=45, second=0)\n end = dt.datetime(year=now.year, month=now.month, day=now.day, hour=16,\n minute=0, second=0)\n data = razon.request_interval(now, start, end)\n positions = get_position_from_angle(razon, data, start, end)\n print(positions)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from cos_correct_v2 import *\nfrom angle_to_position import *\nimport pandas as pd\nimport datetime as dt\n\n\ndef get_position_from_angle(razon, data, start, end):\n dni_df, altitude_angles, azimuth_angles = data\n cos_correct_df = razon.get_cos_factors(altitude_angles, azimuth_angles)\n dni_df = razon.cos_correct(dni_df, cos_correct_df)\n angles = pd.DataFrame()\n angles['Theta'] = dni_df['Theta_']\n angles['Phi'] = dni_df['Phi_']\n\n def match_angles_wrapper(angles):\n mapping = read_and_clean_angle_position()\n return match_angles(mapping, angles[0], angles[1])\n positions = angles.apply(match_angles_wrapper, axis=1)\n positions = [(x[0], x[1]) for x in positions]\n positions = zip(*positions)\n positions = pd.DataFrame(positions).transpose()\n positions['Datetime Local'] = dni_df['Datetime Local']\n return positions\n\n\ndef main():\n razon = RaZON(lat=37.595932, lon=-122.368848, panel_tilt=20, razonIP=\n '192.168.15.150')\n now = dt.datetime(2018, 4, 9)\n start = dt.datetime(year=now.year, month=now.month, day=now.day, hour=\n 13, minute=45, second=0)\n end = dt.datetime(year=now.year, month=now.month, day=now.day, hour=16,\n minute=0, second=0)\n data = razon.request_interval(now, start, end)\n positions = get_position_from_angle(razon, data, start, end)\n print(positions)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python \n\nfrom cos_correct_v2 import *\nfrom angle_to_position import *\nimport pandas as pd\nimport datetime as dt\n\ndef get_position_from_angle(razon, data, start, end):\n # Obtain cos factors and corrected data\n dni_df, altitude_angles, azimuth_angles = data\n cos_correct_df = razon.get_cos_factors(altitude_angles, azimuth_angles)\n dni_df = razon.cos_correct(dni_df, cos_correct_df)\n # print(dni_df)\n\n angles = pd.DataFrame()\n angles['Theta'] = dni_df['Theta_']\n angles['Phi'] = dni_df['Phi_']\n # angles['Time'] = dni_df['Time (hh:mm:ss)']\n # angles['Datetime Local'] = dni_df['Datetime Local']\n # print(angles)\n\n def match_angles_wrapper(angles):\n mapping = read_and_clean_angle_position()\n return match_angles(mapping, angles[0], angles[1])\n\n positions = angles.apply(match_angles_wrapper, axis=1)\n # print(positions)\n positions = [(x[0], x[1]) for x in positions]\n positions = zip(*positions)\n # print(positions)\n positions = pd.DataFrame(positions).transpose()\n positions['Datetime Local'] = dni_df['Datetime Local']\n # print(positions)\n return positions\n\ndef main():\n # Communicate to RaZON through local webpage\n razon = RaZON(lat=37.595932, lon=-122.368848, panel_tilt=20, razonIP=\"192.168.15.150\")\n # Use RaZON.get_local_datetime\n # now = razon.get_local_datetime() - dt.timedelta(days=1)\n now = dt.datetime(2018, 4, 9)\n\n # Samples data between two datetime objects (date is supplied by )\n start = dt.datetime(year=now.year, \n month=now.month, \n day=now.day, \n hour=13, \n minute=45, \n second=0)\n end = dt.datetime(year=now.year, \n month=now.month, \n day=now.day, \n hour=16, \n minute=0, \n second=0)\n\n data = razon.request_interval(now, start, end)\n positions = get_position_from_angle(razon, data, start, end)\n print(positions)\n\n # # Loop through appropriate angles:\n # for angle in angles:\n # mapping = read_and_clean_angle_position()\n # x, y = match_angles(mapping, theta, phi)\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import numpy as np
from .basic import scRefData, featureSelection
from .utils import find_variable_genes, dropout_linear_model
from .process import find_de_tt, find_de_anova
"""
after normalization
befor cluster or nn_indexing
"""
class highlyVarSelecter(featureSelection):
"""
    select highly variable genes
"""
def __init__(self, num_features=None):
self.num_features = num_features
self.process = "select highly varable genes"
def __call__(self, sco):
expression = sco.expression_matrix
vg = find_variable_genes(expression)
if self.num_features is not None:
if len(vg) < self.num_features:
print("not enough candidate genes")
self.num_features = len(vg)
vg = np.random.choice(vg, size=self.num_features, replace=False)
self.selected_features = vg
return self._proc_o(sco)
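# Editorial note (added): when num_features is set and more candidates than
# that survive find_variable_genes, highlyVarSelecter keeps a *random* subset
# (np.random.choice with replace=False), not the top-variance genes; with
# num_features=None every flagged gene is kept.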
class randomSelecter(featureSelection):
def __init__(self, num_features=500):
self.num_features = num_features
self.process = "select genes randomly"
def __call__(self, sco):
h, _ = sco.expression_matrix.shape
self.selected_features = np.random.choice(
np.arange(h), size=self.num_features, replace=False)
return self._proc_o(sco)
class dropOutSelecter(featureSelection):
"""
    select genes with high dropout via a linear model
"""
def __init__(self, num_features=None, large=False):
self.process = "select genes by dropout"
self.num_features = num_features
self.large = large
def __call__(self, sco):
expression = sco.expression_matrix
(s_features, _) = dropout_linear_model(
expression, self.num_features, self.large)
self.selected_features = s_features
return self._proc_o(sco)
class manualSelecter(featureSelection):
"""
manual select
give list of genes
"""
def __init__(self, gene_list):
self.process = "manual select genes"
self.gene_list = gene_list
def __call__(self, sco):
self.selected_features = sco.gene_to_index(self.gene_list)
return self._proc_o(sco)
class markerSelecter_tt(featureSelection):
"""
for labeled data only
select cluster marker as feature
"""
def __init__(self, num_features=500):
self.process = "select genes by cluster marker"
self.num_features = num_features
def __call__(self, sco):
        assert hasattr(sco, 'labels'), "only for labeled data"
lab = sco.labels
fr = find_de_tt(lab, sco.expression_matrix, self.num_features)
self.selected_features = fr
return self._proc_o(sco)
class markerSelecter_anova(featureSelection):
"""
for labeled data only
select cluster marker as feature
"""
def __init__(self, num_features=500):
self.process = "select genes by cluster marker"
self.num_features = num_features
def __call__(self, sco):
        assert hasattr(sco, 'labels'), "only for labeled data"
lab = sco.labels
fr = find_de_anova(lab, sco.expression_matrix, self.num_features)
self.selected_features = fr
return self._proc_o(sco)
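# --- Editorial usage sketch (added; scRefData's constructor and the object
# returned by featureSelection._proc_o are assumptions -- only the attributes
# the classes above touch are known from this file):
#
#   sco = scRefData(...)                       # normalized single-cell object
#   sco = highlyVarSelecter(num_features=2000)(sco)
#   sco = markerSelecter_anova(num_features=500)(sco)  # labeled data only
#
# Each selecter is a callable, so feature-selection stages chain like
# pipeline steps between normalization and clustering / nn_indexing.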
|
normal
|
{
"blob_id": "c972f732553f27261d2a4a03e6e353f2e1b5f5d3",
"index": 8256,
"step-1": "<mask token>\n\n\nclass manualSelecter(featureSelection):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass markerSelecter_tt(featureSelection):\n \"\"\"\n for labeled data only\n select cluster marker as feature\n \"\"\"\n\n def __init__(self, num_features=500):\n self.process = 'select genes by cluster marker'\n self.num_features = num_features\n\n def __call__(self, sco):\n assert hasattr(sco, 'labels'), 'noly for labeled data'\n lab = sco.labels\n fr = find_de_tt(lab, sco.expression_matrix, self.num_features)\n self.selected_features = fr\n return self._proc_o(sco)\n\n\nclass markerSelecter_anova(featureSelection):\n \"\"\"\n for labeled data only\n select cluster marker as feature\n \"\"\"\n\n def __init__(self, num_features=500):\n self.process = 'select genes by cluster marker'\n self.num_features = num_features\n\n def __call__(self, sco):\n assert hasattr(sco, 'labels'), 'noly for labeled data'\n lab = sco.labels\n fr = find_de_anova(lab, sco.expression_matrix, self.num_features)\n self.selected_features = fr\n return self._proc_o(sco)\n",
"step-2": "<mask token>\n\n\nclass manualSelecter(featureSelection):\n <mask token>\n\n def __init__(self, gene_list):\n self.process = 'manual select genes'\n self.gene_list = gene_list\n\n def __call__(self, sco):\n self.selected_features = sco.gene_to_index(self.gene_list)\n return self._proc_o(sco)\n\n\nclass markerSelecter_tt(featureSelection):\n \"\"\"\n for labeled data only\n select cluster marker as feature\n \"\"\"\n\n def __init__(self, num_features=500):\n self.process = 'select genes by cluster marker'\n self.num_features = num_features\n\n def __call__(self, sco):\n assert hasattr(sco, 'labels'), 'noly for labeled data'\n lab = sco.labels\n fr = find_de_tt(lab, sco.expression_matrix, self.num_features)\n self.selected_features = fr\n return self._proc_o(sco)\n\n\nclass markerSelecter_anova(featureSelection):\n \"\"\"\n for labeled data only\n select cluster marker as feature\n \"\"\"\n\n def __init__(self, num_features=500):\n self.process = 'select genes by cluster marker'\n self.num_features = num_features\n\n def __call__(self, sco):\n assert hasattr(sco, 'labels'), 'noly for labeled data'\n lab = sco.labels\n fr = find_de_anova(lab, sco.expression_matrix, self.num_features)\n self.selected_features = fr\n return self._proc_o(sco)\n",
"step-3": "<mask token>\n\n\nclass highlyVarSelecter(featureSelection):\n <mask token>\n\n def __init__(self, num_features=None):\n self.num_features = num_features\n self.process = 'select highly varable genes'\n <mask token>\n\n\nclass randomSelecter(featureSelection):\n\n def __init__(self, num_features=500):\n self.num_features = num_features\n self.process = 'select genes randomly'\n\n def __call__(self, sco):\n h, _ = sco.expression_matrix.shape\n self.selected_features = np.random.choice(np.arange(h), size=self.\n num_features, replace=False)\n return self._proc_o(sco)\n\n\nclass dropOutSelecter(featureSelection):\n \"\"\"\n linear modle high drop out select\n \"\"\"\n\n def __init__(self, num_features=None, large=False):\n self.process = 'select genes by dropout'\n self.num_features = num_features\n self.large = large\n\n def __call__(self, sco):\n expression = sco.expression_matrix\n s_features, _ = dropout_linear_model(expression, self.num_features,\n self.large)\n self.selected_features = s_features\n return self._proc_o(sco)\n\n\nclass manualSelecter(featureSelection):\n \"\"\"\n manual select\n give list of genes\n \"\"\"\n\n def __init__(self, gene_list):\n self.process = 'manual select genes'\n self.gene_list = gene_list\n\n def __call__(self, sco):\n self.selected_features = sco.gene_to_index(self.gene_list)\n return self._proc_o(sco)\n\n\nclass markerSelecter_tt(featureSelection):\n \"\"\"\n for labeled data only\n select cluster marker as feature\n \"\"\"\n\n def __init__(self, num_features=500):\n self.process = 'select genes by cluster marker'\n self.num_features = num_features\n\n def __call__(self, sco):\n assert hasattr(sco, 'labels'), 'noly for labeled data'\n lab = sco.labels\n fr = find_de_tt(lab, sco.expression_matrix, self.num_features)\n self.selected_features = fr\n return self._proc_o(sco)\n\n\nclass markerSelecter_anova(featureSelection):\n \"\"\"\n for labeled data only\n select cluster marker as feature\n \"\"\"\n\n def __init__(self, num_features=500):\n self.process = 'select genes by cluster marker'\n self.num_features = num_features\n\n def __call__(self, sco):\n assert hasattr(sco, 'labels'), 'noly for labeled data'\n lab = sco.labels\n fr = find_de_anova(lab, sco.expression_matrix, self.num_features)\n self.selected_features = fr\n return self._proc_o(sco)\n",
"step-4": "<mask token>\n\n\nclass highlyVarSelecter(featureSelection):\n \"\"\"\n select highly varable genes;\n \"\"\"\n\n def __init__(self, num_features=None):\n self.num_features = num_features\n self.process = 'select highly varable genes'\n\n def __call__(self, sco):\n expression = sco.expression_matrix\n vg = find_variable_genes(expression)\n if self.num_features is not None:\n if len(vg) < self.num_features:\n print('not enough candidate genes')\n self.num_features = len(vg)\n vg = np.random.choice(vg, size=self.num_features, replace=False)\n self.selected_features = vg\n return self._proc_o(sco)\n\n\nclass randomSelecter(featureSelection):\n\n def __init__(self, num_features=500):\n self.num_features = num_features\n self.process = 'select genes randomly'\n\n def __call__(self, sco):\n h, _ = sco.expression_matrix.shape\n self.selected_features = np.random.choice(np.arange(h), size=self.\n num_features, replace=False)\n return self._proc_o(sco)\n\n\nclass dropOutSelecter(featureSelection):\n \"\"\"\n linear modle high drop out select\n \"\"\"\n\n def __init__(self, num_features=None, large=False):\n self.process = 'select genes by dropout'\n self.num_features = num_features\n self.large = large\n\n def __call__(self, sco):\n expression = sco.expression_matrix\n s_features, _ = dropout_linear_model(expression, self.num_features,\n self.large)\n self.selected_features = s_features\n return self._proc_o(sco)\n\n\nclass manualSelecter(featureSelection):\n \"\"\"\n manual select\n give list of genes\n \"\"\"\n\n def __init__(self, gene_list):\n self.process = 'manual select genes'\n self.gene_list = gene_list\n\n def __call__(self, sco):\n self.selected_features = sco.gene_to_index(self.gene_list)\n return self._proc_o(sco)\n\n\nclass markerSelecter_tt(featureSelection):\n \"\"\"\n for labeled data only\n select cluster marker as feature\n \"\"\"\n\n def __init__(self, num_features=500):\n self.process = 'select genes by cluster marker'\n self.num_features = num_features\n\n def __call__(self, sco):\n assert hasattr(sco, 'labels'), 'noly for labeled data'\n lab = sco.labels\n fr = find_de_tt(lab, sco.expression_matrix, self.num_features)\n self.selected_features = fr\n return self._proc_o(sco)\n\n\nclass markerSelecter_anova(featureSelection):\n \"\"\"\n for labeled data only\n select cluster marker as feature\n \"\"\"\n\n def __init__(self, num_features=500):\n self.process = 'select genes by cluster marker'\n self.num_features = num_features\n\n def __call__(self, sco):\n assert hasattr(sco, 'labels'), 'noly for labeled data'\n lab = sco.labels\n fr = find_de_anova(lab, sco.expression_matrix, self.num_features)\n self.selected_features = fr\n return self._proc_o(sco)\n",
"step-5": "import numpy as np\nfrom .basic import scRefData, featureSelection\nfrom .utils import find_variable_genes, dropout_linear_model\nfrom .process import find_de_tt, find_de_anova\n\"\"\"\nafter normalization\nbefor cluster or nn_indexing\n\"\"\"\n\n\nclass highlyVarSelecter(featureSelection):\n \"\"\"\n select highly varable genes;\n \"\"\"\n def __init__(self, num_features=None):\n self.num_features = num_features\n self.process = \"select highly varable genes\"\n\n def __call__(self, sco):\n expression = sco.expression_matrix\n vg = find_variable_genes(expression)\n if self.num_features is not None:\n if len(vg) < self.num_features:\n print(\"not enough candidate genes\")\n self.num_features = len(vg)\n vg = np.random.choice(vg, size=self.num_features, replace=False)\n self.selected_features = vg\n return self._proc_o(sco)\n\n\nclass randomSelecter(featureSelection):\n def __init__(self, num_features=500):\n self.num_features = num_features\n self.process = \"select genes randomly\"\n\n def __call__(self, sco):\n h, _ = sco.expression_matrix.shape\n self.selected_features = np.random.choice(\n np.arange(h), size=self.num_features, replace=False)\n return self._proc_o(sco)\n\n\nclass dropOutSelecter(featureSelection):\n \"\"\"\n linear modle high drop out select\n \"\"\"\n def __init__(self, num_features=None, large=False):\n self.process = \"select genes by dropout\"\n self.num_features = num_features\n self.large = large\n\n def __call__(self, sco):\n expression = sco.expression_matrix\n (s_features, _) = dropout_linear_model(\n expression, self.num_features, self.large)\n self.selected_features = s_features\n return self._proc_o(sco)\n\n\nclass manualSelecter(featureSelection):\n \"\"\"\n manual select\n give list of genes\n \"\"\"\n def __init__(self, gene_list):\n self.process = \"manual select genes\"\n self.gene_list = gene_list\n\n def __call__(self, sco):\n self.selected_features = sco.gene_to_index(self.gene_list)\n return self._proc_o(sco)\n\n\nclass markerSelecter_tt(featureSelection):\n \"\"\"\n for labeled data only\n select cluster marker as feature\n \"\"\"\n def __init__(self, num_features=500):\n self.process = \"select genes by cluster marker\"\n self.num_features = num_features\n\n def __call__(self, sco):\n assert hasattr(sco, 'labels'), \"noly for labeled data\"\n lab = sco.labels\n fr = find_de_tt(lab, sco.expression_matrix, self.num_features)\n self.selected_features = fr\n return self._proc_o(sco)\n\nclass markerSelecter_anova(featureSelection):\n \"\"\"\n for labeled data only\n select cluster marker as feature\n \"\"\"\n def __init__(self, num_features=500):\n self.process = \"select genes by cluster marker\"\n self.num_features = num_features\n\n def __call__(self, sco):\n assert hasattr(sco, 'labels'), \"noly for labeled data\"\n lab = sco.labels\n fr = find_de_anova(lab, sco.expression_matrix, self.num_features)\n self.selected_features = fr\n return self._proc_o(sco)",
"step-ids": [
9,
11,
21,
23,
25
]
}
|
[
9,
11,
21,
23,
25
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('翻译结果:%s' % target['translateResult'][0][0]['tgt'])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
content = input('请输入需要翻译的内容:')
url = 'http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule'
data = {}
data['action'] = 'FY_BY_CLICKBUTTION'
data['bv'] = '1ca13a5465c2ab126e616ee8d6720cc3'
data['client'] = 'fanyideskweb'
data['doctype'] = 'json'
data['from'] = 'AUTO'
data['i'] = content
data['keyfrom'] = 'fanyi.web'
data['salt'] = '15708737847078'
data['sign'] = '64037c1dd211ea7bd98321a3bd8ab45a'
data['smartresult'] = 'dict'
data['to'] = 'AUTO'
data['ts'] = '1570873784707'
data['version'] = '2.1'
data = urllib.parse.urlencode(data).encode('utf-8')
response = urllib.request.urlopen(url, data)
html = response.read().decode('utf-8')
target = json.loads(html)
print('翻译结果:%s' % target['translateResult'][0][0]['tgt'])
<|reserved_special_token_1|>
import urllib.request
import urllib.parse
import json
content = input('请输入需要翻译的内容:')
url = 'http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule'
data = {}
data['action'] = 'FY_BY_CLICKBUTTION'
data['bv'] = '1ca13a5465c2ab126e616ee8d6720cc3'
data['client'] = 'fanyideskweb'
data['doctype'] = 'json'
data['from'] = 'AUTO'
data['i'] = content
data['keyfrom'] = 'fanyi.web'
data['salt'] = '15708737847078'
data['sign'] = '64037c1dd211ea7bd98321a3bd8ab45a'
data['smartresult'] = 'dict'
data['to'] = 'AUTO'
data['ts'] = '1570873784707'
data['version'] = '2.1'
data = urllib.parse.urlencode(data).encode('utf-8')
response = urllib.request.urlopen(url, data)
html = response.read().decode('utf-8')
target = json.loads(html)
print('翻译结果:%s' % target['translateResult'][0][0]['tgt'])
<|reserved_special_token_1|>
import urllib.request
import urllib.parse
import json
content = input("请输入需要翻译的内容:")
url = 'http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule'
data = {}
data['action'] = 'FY_BY_CLICKBUTTION'
data['bv'] = '1ca13a5465c2ab126e616ee8d6720cc3'
data['client'] = 'fanyideskweb'
data['doctype'] = 'json'
data['from'] = 'AUTO'
data['i'] = content
data['keyfrom'] = 'fanyi.web'
data['salt'] = '15708737847078'
data['sign'] = '64037c1dd211ea7bd98321a3bd8ab45a'
data['smartresult'] = 'dict'
data['to'] = 'AUTO'
data['ts'] = '1570873784707'
data['version'] = '2.1'
data = urllib.parse.urlencode(data).encode('utf-8')
response = urllib.request.urlopen(url,data)
html = response.read().decode('utf-8')
target = json.loads(html)
print("翻译结果:%s" % (target['translateResult'][0][0]['tgt']))
|
flexible
|
{
"blob_id": "e01b1f57a572571619d6c0981370030dc6105fd2",
"index": 8636,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('翻译结果:%s' % target['translateResult'][0][0]['tgt'])\n",
"step-3": "<mask token>\ncontent = input('请输入需要翻译的内容:')\nurl = 'http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule'\ndata = {}\ndata['action'] = 'FY_BY_CLICKBUTTION'\ndata['bv'] = '1ca13a5465c2ab126e616ee8d6720cc3'\ndata['client'] = 'fanyideskweb'\ndata['doctype'] = 'json'\ndata['from'] = 'AUTO'\ndata['i'] = content\ndata['keyfrom'] = 'fanyi.web'\ndata['salt'] = '15708737847078'\ndata['sign'] = '64037c1dd211ea7bd98321a3bd8ab45a'\ndata['smartresult'] = 'dict'\ndata['to'] = 'AUTO'\ndata['ts'] = '1570873784707'\ndata['version'] = '2.1'\ndata = urllib.parse.urlencode(data).encode('utf-8')\nresponse = urllib.request.urlopen(url, data)\nhtml = response.read().decode('utf-8')\ntarget = json.loads(html)\nprint('翻译结果:%s' % target['translateResult'][0][0]['tgt'])\n",
"step-4": "import urllib.request\nimport urllib.parse\nimport json\ncontent = input('请输入需要翻译的内容:')\nurl = 'http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule'\ndata = {}\ndata['action'] = 'FY_BY_CLICKBUTTION'\ndata['bv'] = '1ca13a5465c2ab126e616ee8d6720cc3'\ndata['client'] = 'fanyideskweb'\ndata['doctype'] = 'json'\ndata['from'] = 'AUTO'\ndata['i'] = content\ndata['keyfrom'] = 'fanyi.web'\ndata['salt'] = '15708737847078'\ndata['sign'] = '64037c1dd211ea7bd98321a3bd8ab45a'\ndata['smartresult'] = 'dict'\ndata['to'] = 'AUTO'\ndata['ts'] = '1570873784707'\ndata['version'] = '2.1'\ndata = urllib.parse.urlencode(data).encode('utf-8')\nresponse = urllib.request.urlopen(url, data)\nhtml = response.read().decode('utf-8')\ntarget = json.loads(html)\nprint('翻译结果:%s' % target['translateResult'][0][0]['tgt'])\n",
"step-5": "import urllib.request\nimport urllib.parse\nimport json\n\ncontent = input(\"请输入需要翻译的内容:\")\n\nurl = 'http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule'\n\ndata = {}\ndata['action'] = 'FY_BY_CLICKBUTTION'\ndata['bv'] = '1ca13a5465c2ab126e616ee8d6720cc3'\ndata['client'] = 'fanyideskweb'\ndata['doctype'] = 'json'\ndata['from'] = 'AUTO'\ndata['i'] = content\ndata['keyfrom'] = 'fanyi.web'\ndata['salt'] = '15708737847078'\ndata['sign'] = '64037c1dd211ea7bd98321a3bd8ab45a'\ndata['smartresult'] = 'dict'\ndata['to'] = 'AUTO'\ndata['ts'] = '1570873784707'\ndata['version'] = '2.1'\ndata = urllib.parse.urlencode(data).encode('utf-8')\n\nresponse = urllib.request.urlopen(url,data)\nhtml = response.read().decode('utf-8')\n\ntarget = json.loads(html)\nprint(\"翻译结果:%s\" % (target['translateResult'][0][0]['tgt']))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
questions = ('Какой язык мы учим?', 'Какой тип данных имеет целая переменная?', 'Какой тип данных имеет вещественная переменная?', 'Какой тип данных имеет логическая переменная?', 'Какой тип данных имеет символьная переменная?')
answers = ('Python', 'Integer', 'Float', 'Bool', 'String')
i = 0
count_answers = 0
while i < len(questions):
user_answers = input('{}...'.format(questions[i]))
if user_answers.capitalize() == answers[i]:
count_answers = count_answers + 1
i += 1
print('Было задано {i} вопросов. Правильных ответов - {count_answers}!'.format(i = i, count_answers = count_answers))
|
normal
|
{
"blob_id": "dd936839d71b97b3a21115498092d8984de0e3f1",
"index": 7445,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile i < len(questions):\n user_answers = input('{}...'.format(questions[i]))\n if user_answers.capitalize() == answers[i]:\n count_answers = count_answers + 1\n i += 1\nprint('Было задано {i} вопросов. Правильных ответов - {count_answers}!'.\n format(i=i, count_answers=count_answers))\n",
"step-3": "questions = ('Какой язык мы учим?',\n 'Какой тип данных имеет целая переменная?',\n 'Какой тип данных имеет вещественная переменная?',\n 'Какой тип данных имеет логическая переменная?',\n 'Какой тип данных имеет символьная переменная?')\nanswers = 'Python', 'Integer', 'Float', 'Bool', 'String'\ni = 0\ncount_answers = 0\nwhile i < len(questions):\n user_answers = input('{}...'.format(questions[i]))\n if user_answers.capitalize() == answers[i]:\n count_answers = count_answers + 1\n i += 1\nprint('Было задано {i} вопросов. Правильных ответов - {count_answers}!'.\n format(i=i, count_answers=count_answers))\n",
"step-4": "questions = ('Какой язык мы учим?', 'Какой тип данных имеет целая переменная?', 'Какой тип данных имеет вещественная переменная?', 'Какой тип данных имеет логическая переменная?', 'Какой тип данных имеет символьная переменная?')\nanswers = ('Python', 'Integer', 'Float', 'Bool', 'String')\ni = 0\ncount_answers = 0\nwhile i < len(questions):\n user_answers = input('{}...'.format(questions[i]))\n if user_answers.capitalize() == answers[i]:\n count_answers = count_answers + 1\n i += 1\nprint('Было задано {i} вопросов. Правильных ответов - {count_answers}!'.format(i = i, count_answers = count_answers))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import tensorflow as tf
from tensorflow.contrib import layers
from tensorflow.contrib.framework.python.ops import arg_scope
from tensorflow.contrib.layers.python.layers import initializers
from tensorflow.contrib.layers.python.layers import layers as layers_lib
from tensorflow.contrib.layers.python.layers import regularizers
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import variable_scope
import inception_2d
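# Classification head on an InceptionV1 trunk truncated at Mixed_4b: image
# features are pooled and flattened, concatenated with the per-sample
# `fields` vector, then classified by a fully connected head.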
def inception_2d_fields(img,
fields,
num_classes=30,
is_training=True,
dropout_keep_prob=0.6,
prediction_fn=layers_lib.softmax,
spatial_squeeze=True,
reuse=None,
scope='InceptionV1_Fields'
):
with arg_scope([layers.conv2d, layers_lib.fully_connected],
weights_initializer=tf.contrib.layers.xavier_initializer(),
biases_initializer=tf.constant_initializer(0.2),
weights_regularizer=regularizers.l2_regularizer(0.0002),
biases_regularizer=regularizers.l2_regularizer(0.0002)):
net, end_points = inception_2d.inception_v1_base(img, scope=scope, final_endpoint='Mixed_4b')
with variable_scope.variable_scope('Logits'):
net = layers_lib.avg_pool2d(net, [5, 5], stride=3, scope='AvgPool_0a_5x5')
net = layers.conv2d(inputs=net, num_outputs=128, kernel_size=1)
net = tf.reshape(net, [-1, 1, 1, 4 * 4 * 128])
net = array_ops.squeeze(net,[1,2],name='Squeeze4Fields')
net = tf.concat([net,fields],axis=1)
net = layers.fully_connected(inputs=net, num_outputs=1024)
net = layers_lib.dropout(net, dropout_keep_prob, scope='Dropout_0b')
logits = layers.fully_connected(inputs=net,
num_outputs=num_classes,
activation_fn=None,
weights_initializer=tf.contrib.layers.xavier_initializer(),
biases_initializer=tf.constant_initializer(0.0),
weights_regularizer=regularizers.l2_regularizer(0.0002),
biases_regularizer=regularizers.l2_regularizer(0.0002),
scope='InnerProduct')
# logits = layers.conv2d(
# net,
# num_classes, [1, 1],
# activation_fn=None,
# normalizer_fn=None,
# scope='Conv2d_0c_1x1')
if spatial_squeeze:
logits = array_ops.squeeze(logits, [1, 2], name='SpatialSqueeze')
end_points['Logits'] = logits
end_points['Predictions'] = prediction_fn(logits, scope='Predictions')
return logits, end_points
|
normal
|
{
"blob_id": "ca93f49fbdc1d64e0616bca035a6043b3cc80ddc",
"index": 1485,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef inception_2d_fields(img, fields, num_classes=30, is_training=True,\n dropout_keep_prob=0.6, prediction_fn=layers_lib.softmax,\n spatial_squeeze=True, reuse=None, scope='InceptionV1_Fields'):\n with arg_scope([layers.conv2d, layers_lib.fully_connected],\n weights_initializer=tf.contrib.layers.xavier_initializer(),\n biases_initializer=tf.constant_initializer(0.2),\n weights_regularizer=regularizers.l2_regularizer(0.0002),\n biases_regularizer=regularizers.l2_regularizer(0.0002)):\n net, end_points = inception_2d.inception_v1_base(img, scope=scope,\n final_endpoint='Mixed_4b')\n with variable_scope.variable_scope('Logits'):\n net = layers_lib.avg_pool2d(net, [5, 5], stride=3, scope=\n 'AvgPool_0a_5x5')\n net = layers.conv2d(inputs=net, num_outputs=128, kernel_size=1)\n net = tf.reshape(net, [-1, 1, 1, 4 * 4 * 128])\n net = array_ops.squeeze(net, [1, 2], name='Squeeze4Fields')\n net = tf.concat([net, fields], axis=1)\n net = layers.fully_connected(inputs=net, num_outputs=1024)\n net = layers_lib.dropout(net, dropout_keep_prob, scope='Dropout_0b'\n )\n logits = layers.fully_connected(inputs=net, num_outputs=\n num_classes, activation_fn=None, weights_initializer=tf.\n contrib.layers.xavier_initializer(), biases_initializer=tf.\n constant_initializer(0.0), weights_regularizer=regularizers\n .l2_regularizer(0.0002), biases_regularizer=regularizers.\n l2_regularizer(0.0002), scope='InnerProduct')\n if spatial_squeeze:\n logits = array_ops.squeeze(logits, [1, 2], name=\n 'SpatialSqueeze')\n end_points['Logits'] = logits\n end_points['Predictions'] = prediction_fn(logits, scope=\n 'Predictions')\n return logits, end_points\n",
"step-3": "import tensorflow as tf\nfrom tensorflow.contrib import layers\nfrom tensorflow.contrib.framework.python.ops import arg_scope\nfrom tensorflow.contrib.layers.python.layers import initializers\nfrom tensorflow.contrib.layers.python.layers import layers as layers_lib\nfrom tensorflow.contrib.layers.python.layers import regularizers\nfrom tensorflow.python.framework import ops\nfrom tensorflow.python.ops import array_ops\nfrom tensorflow.python.ops import init_ops\nfrom tensorflow.python.ops import nn_ops\nfrom tensorflow.python.ops import variable_scope\nimport inception_2d\n\n\ndef inception_2d_fields(img, fields, num_classes=30, is_training=True,\n dropout_keep_prob=0.6, prediction_fn=layers_lib.softmax,\n spatial_squeeze=True, reuse=None, scope='InceptionV1_Fields'):\n with arg_scope([layers.conv2d, layers_lib.fully_connected],\n weights_initializer=tf.contrib.layers.xavier_initializer(),\n biases_initializer=tf.constant_initializer(0.2),\n weights_regularizer=regularizers.l2_regularizer(0.0002),\n biases_regularizer=regularizers.l2_regularizer(0.0002)):\n net, end_points = inception_2d.inception_v1_base(img, scope=scope,\n final_endpoint='Mixed_4b')\n with variable_scope.variable_scope('Logits'):\n net = layers_lib.avg_pool2d(net, [5, 5], stride=3, scope=\n 'AvgPool_0a_5x5')\n net = layers.conv2d(inputs=net, num_outputs=128, kernel_size=1)\n net = tf.reshape(net, [-1, 1, 1, 4 * 4 * 128])\n net = array_ops.squeeze(net, [1, 2], name='Squeeze4Fields')\n net = tf.concat([net, fields], axis=1)\n net = layers.fully_connected(inputs=net, num_outputs=1024)\n net = layers_lib.dropout(net, dropout_keep_prob, scope='Dropout_0b'\n )\n logits = layers.fully_connected(inputs=net, num_outputs=\n num_classes, activation_fn=None, weights_initializer=tf.\n contrib.layers.xavier_initializer(), biases_initializer=tf.\n constant_initializer(0.0), weights_regularizer=regularizers\n .l2_regularizer(0.0002), biases_regularizer=regularizers.\n l2_regularizer(0.0002), scope='InnerProduct')\n if spatial_squeeze:\n logits = array_ops.squeeze(logits, [1, 2], name=\n 'SpatialSqueeze')\n end_points['Logits'] = logits\n end_points['Predictions'] = prediction_fn(logits, scope=\n 'Predictions')\n return logits, end_points\n",
"step-4": "import tensorflow as tf\nfrom tensorflow.contrib import layers\nfrom tensorflow.contrib.framework.python.ops import arg_scope\nfrom tensorflow.contrib.layers.python.layers import initializers\nfrom tensorflow.contrib.layers.python.layers import layers as layers_lib\nfrom tensorflow.contrib.layers.python.layers import regularizers\nfrom tensorflow.python.framework import ops\nfrom tensorflow.python.ops import array_ops\nfrom tensorflow.python.ops import init_ops\nfrom tensorflow.python.ops import nn_ops\nfrom tensorflow.python.ops import variable_scope\n\nimport inception_2d\n\ndef inception_2d_fields(img,\n fields,\n num_classes=30,\n is_training=True,\n dropout_keep_prob=0.6,\n prediction_fn=layers_lib.softmax,\n spatial_squeeze=True,\n reuse=None,\n scope='InceptionV1_Fields'\n ):\n with arg_scope([layers.conv2d, layers_lib.fully_connected],\n weights_initializer=tf.contrib.layers.xavier_initializer(),\n biases_initializer=tf.constant_initializer(0.2),\n weights_regularizer=regularizers.l2_regularizer(0.0002),\n biases_regularizer=regularizers.l2_regularizer(0.0002)):\n net, end_points = inception_2d.inception_v1_base(img, scope=scope, final_endpoint='Mixed_4b')\n with variable_scope.variable_scope('Logits'):\n net = layers_lib.avg_pool2d(net, [5, 5], stride=3, scope='AvgPool_0a_5x5')\n net = layers.conv2d(inputs=net, num_outputs=128, kernel_size=1)\n net = tf.reshape(net, [-1, 1, 1, 4 * 4 * 128])\n net = array_ops.squeeze(net,[1,2],name='Squeeze4Fields')\n net = tf.concat([net,fields],axis=1)\n net = layers.fully_connected(inputs=net, num_outputs=1024)\n net = layers_lib.dropout(net, dropout_keep_prob, scope='Dropout_0b')\n logits = layers.fully_connected(inputs=net,\n num_outputs=num_classes,\n activation_fn=None,\n weights_initializer=tf.contrib.layers.xavier_initializer(),\n biases_initializer=tf.constant_initializer(0.0),\n weights_regularizer=regularizers.l2_regularizer(0.0002),\n biases_regularizer=regularizers.l2_regularizer(0.0002),\n scope='InnerProduct')\n # logits = layers.conv2d(\n # net,\n # num_classes, [1, 1],\n # activation_fn=None,\n # normalizer_fn=None,\n # scope='Conv2d_0c_1x1')\n if spatial_squeeze:\n logits = array_ops.squeeze(logits, [1, 2], name='SpatialSqueeze')\n\n end_points['Logits'] = logits\n end_points['Predictions'] = prediction_fn(logits, scope='Predictions')\n\n\n return logits, end_points\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
_base_ = "../model.py"
model = dict(
type="ImageClassifier",
task="classification",
pretrained=None,
backbone=dict(),
head=dict(in_channels=-1, loss=dict(type="CrossEntropyLoss", loss_weight=1.0), topk=(1, 5)),
)
checkpoint_config = dict(type="CheckpointHookWithValResults")
|
normal
|
{
"blob_id": "8bd5eff12e68f7145676f5e089b51376a82ab489",
"index": 3231,
"step-1": "<mask token>\n",
"step-2": "_base_ = '../model.py'\nmodel = dict(type='ImageClassifier', task='classification', pretrained=None,\n backbone=dict(), head=dict(in_channels=-1, loss=dict(type=\n 'CrossEntropyLoss', loss_weight=1.0), topk=(1, 5)))\ncheckpoint_config = dict(type='CheckpointHookWithValResults')\n",
"step-3": "_base_ = \"../model.py\"\n\nmodel = dict(\n type=\"ImageClassifier\",\n task=\"classification\",\n pretrained=None,\n backbone=dict(),\n head=dict(in_channels=-1, loss=dict(type=\"CrossEntropyLoss\", loss_weight=1.0), topk=(1, 5)),\n)\n\ncheckpoint_config = dict(type=\"CheckpointHookWithValResults\")\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# Print the Armstrong (narcissistic) numbers in the range [l, u].
l = int(input("Enter lower range: "))
u = int(input("Enter upper range: "))
if l <= 0 or u <= 0:
    print("invalid")
else:
    for num in range(l, u + 1):
        n = len(str(num))
        total = 0
        temp = num
        while temp > 0:
            digit = temp % 10
            total += digit ** n
            temp //= 10
        if num == total:
            print(num)
|
normal
|
{
"blob_id": "42fa0aa98e2d3336bdb56cba97596d8532d46cb4",
"index": 2896,
"step-1": "l = int(input(\"Enter lower range: \"))\nu = int(input(\"Enter upper range: \"))\nif(l<=0):\n print \"invalid\"\nif (u<=0):\n print \"invalid\"\n for num in range(l,u+1):\n n = len(str(num))\n sum = 0\n temp = num\n while temp > 0:\n digit = temp % 10\n sum += digit ** n\n temp //= 10\n if num == sum:\n print(num)\n \n \n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import pandemic as pd
from typing import Sequence
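# Helpers for exporting a pandemic peer network to GML (Graph Modelling
# Language): one node per infected peer (labelled with its patch epoch, if
# any) and one directed edge per infection event.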
def save_gml(path: str, peers: Sequence[pd.Peer]) -> bool:
try:
with open(path, "w") as file:
file.write(graph(peers))
except Exception:
return True
return False
def print_gml(peers: Sequence[pd.Peer]) -> None:
print(graph(peers))
def graph(peers: Sequence[pd.Peer]) -> str:
return(
'graph [' + '\n' +
'\t' + 'directed 1' + '\n' +
''.join(map(node, peers)) +
''.join(map(edge, peers)) +
']' + '\n'
)
def node(peer: pd.Peer):
if peer.data_infection is None:
return ""
return(
'\t' + 'node [' + '\n' +
'\t' + '\t' + 'id {}'.format(peer.id) + '\n' +
'\t' + '\t' + 'label "{}"'.format(node_label(peer)) + '\n' +
'\t' + ']' + '\n'
)
def node_label(peer: pd.Peer) -> str:
return "" if peer.data_patch is None else str(peer.data_patch.epoch)
def edge(peer: pd.Peer) -> str:
if peer.data_infection is None:
return ""
return(
'\t' + 'edge [' + '\n' +
'\t' + '\t' + 'source {}'.format(peer.data_infection.source) + '\n' +
'\t' + '\t' + 'target {}'.format(peer.data_infection.target) + '\n' +
'\t' + '\t' + 'label "{}"'.format(peer.data_infection.epoch) + '\n' +
'\t' + ']' + '\n'
)
|
normal
|
{
"blob_id": "cb0b963c0e5aadcb67b5ee5f055fb9b6f21892fc",
"index": 5292,
"step-1": "<mask token>\n\n\ndef node(peer: pd.Peer):\n if peer.data_infection is None:\n return ''\n return '\\t' + 'node [' + '\\n' + '\\t' + '\\t' + 'id {}'.format(peer.id\n ) + '\\n' + '\\t' + '\\t' + 'label \"{}\"'.format(node_label(peer)\n ) + '\\n' + '\\t' + ']' + '\\n'\n\n\n<mask token>\n\n\ndef edge(peer: pd.Peer) ->str:\n if peer.data_infection is None:\n return ''\n return '\\t' + 'edge [' + '\\n' + '\\t' + '\\t' + 'source {}'.format(peer.\n data_infection.source) + '\\n' + '\\t' + '\\t' + 'target {}'.format(peer\n .data_infection.target) + '\\n' + '\\t' + '\\t' + 'label \"{}\"'.format(peer\n .data_infection.epoch) + '\\n' + '\\t' + ']' + '\\n'\n",
"step-2": "<mask token>\n\n\ndef save_gml(path: str, peers: Sequence[pd.Peer]) ->bool:\n try:\n with open(path, 'w') as file:\n file.write(graph(peers))\n except Exception:\n return True\n return False\n\n\n<mask token>\n\n\ndef graph(peers: Sequence[pd.Peer]) ->str:\n return 'graph [' + '\\n' + '\\t' + 'directed 1' + '\\n' + ''.join(map(node,\n peers)) + ''.join(map(edge, peers)) + ']' + '\\n'\n\n\ndef node(peer: pd.Peer):\n if peer.data_infection is None:\n return ''\n return '\\t' + 'node [' + '\\n' + '\\t' + '\\t' + 'id {}'.format(peer.id\n ) + '\\n' + '\\t' + '\\t' + 'label \"{}\"'.format(node_label(peer)\n ) + '\\n' + '\\t' + ']' + '\\n'\n\n\ndef node_label(peer: pd.Peer) ->str:\n return '' if peer.data_patch is None else str(peer.data_patch.epoch)\n\n\ndef edge(peer: pd.Peer) ->str:\n if peer.data_infection is None:\n return ''\n return '\\t' + 'edge [' + '\\n' + '\\t' + '\\t' + 'source {}'.format(peer.\n data_infection.source) + '\\n' + '\\t' + '\\t' + 'target {}'.format(peer\n .data_infection.target) + '\\n' + '\\t' + '\\t' + 'label \"{}\"'.format(peer\n .data_infection.epoch) + '\\n' + '\\t' + ']' + '\\n'\n",
"step-3": "<mask token>\n\n\ndef save_gml(path: str, peers: Sequence[pd.Peer]) ->bool:\n try:\n with open(path, 'w') as file:\n file.write(graph(peers))\n except Exception:\n return True\n return False\n\n\ndef print_gml(peers: Sequence[pd.Peer]) ->None:\n print(graph(peers))\n\n\ndef graph(peers: Sequence[pd.Peer]) ->str:\n return 'graph [' + '\\n' + '\\t' + 'directed 1' + '\\n' + ''.join(map(node,\n peers)) + ''.join(map(edge, peers)) + ']' + '\\n'\n\n\ndef node(peer: pd.Peer):\n if peer.data_infection is None:\n return ''\n return '\\t' + 'node [' + '\\n' + '\\t' + '\\t' + 'id {}'.format(peer.id\n ) + '\\n' + '\\t' + '\\t' + 'label \"{}\"'.format(node_label(peer)\n ) + '\\n' + '\\t' + ']' + '\\n'\n\n\ndef node_label(peer: pd.Peer) ->str:\n return '' if peer.data_patch is None else str(peer.data_patch.epoch)\n\n\ndef edge(peer: pd.Peer) ->str:\n if peer.data_infection is None:\n return ''\n return '\\t' + 'edge [' + '\\n' + '\\t' + '\\t' + 'source {}'.format(peer.\n data_infection.source) + '\\n' + '\\t' + '\\t' + 'target {}'.format(peer\n .data_infection.target) + '\\n' + '\\t' + '\\t' + 'label \"{}\"'.format(peer\n .data_infection.epoch) + '\\n' + '\\t' + ']' + '\\n'\n",
"step-4": "import pandemic as pd\nfrom typing import Sequence\n\n\ndef save_gml(path: str, peers: Sequence[pd.Peer]) ->bool:\n try:\n with open(path, 'w') as file:\n file.write(graph(peers))\n except Exception:\n return True\n return False\n\n\ndef print_gml(peers: Sequence[pd.Peer]) ->None:\n print(graph(peers))\n\n\ndef graph(peers: Sequence[pd.Peer]) ->str:\n return 'graph [' + '\\n' + '\\t' + 'directed 1' + '\\n' + ''.join(map(node,\n peers)) + ''.join(map(edge, peers)) + ']' + '\\n'\n\n\ndef node(peer: pd.Peer):\n if peer.data_infection is None:\n return ''\n return '\\t' + 'node [' + '\\n' + '\\t' + '\\t' + 'id {}'.format(peer.id\n ) + '\\n' + '\\t' + '\\t' + 'label \"{}\"'.format(node_label(peer)\n ) + '\\n' + '\\t' + ']' + '\\n'\n\n\ndef node_label(peer: pd.Peer) ->str:\n return '' if peer.data_patch is None else str(peer.data_patch.epoch)\n\n\ndef edge(peer: pd.Peer) ->str:\n if peer.data_infection is None:\n return ''\n return '\\t' + 'edge [' + '\\n' + '\\t' + '\\t' + 'source {}'.format(peer.\n data_infection.source) + '\\n' + '\\t' + '\\t' + 'target {}'.format(peer\n .data_infection.target) + '\\n' + '\\t' + '\\t' + 'label \"{}\"'.format(peer\n .data_infection.epoch) + '\\n' + '\\t' + ']' + '\\n'\n",
"step-5": "import pandemic as pd\nfrom typing import Sequence\n\n\ndef save_gml(path: str, peers: Sequence[pd.Peer]) -> bool:\n try:\n with open(path, \"w\") as file:\n file.write(graph(peers))\n except Exception:\n return True\n\n return False\n\n\ndef print_gml(peers: Sequence[pd.Peer]) -> None:\n print(graph(peers))\n\n\ndef graph(peers: Sequence[pd.Peer]) -> str:\n return(\n 'graph [' + '\\n' +\n '\\t' + 'directed 1' + '\\n' +\n ''.join(map(node, peers)) +\n ''.join(map(edge, peers)) +\n ']' + '\\n'\n )\n\n\ndef node(peer: pd.Peer):\n if peer.data_infection is None:\n return \"\"\n\n return(\n '\\t' + 'node [' + '\\n' +\n '\\t' + '\\t' + 'id {}'.format(peer.id) + '\\n' +\n '\\t' + '\\t' + 'label \"{}\"'.format(node_label(peer)) + '\\n' +\n '\\t' + ']' + '\\n'\n )\n\n\ndef node_label(peer: pd.Peer) -> str:\n return \"\" if peer.data_patch is None else str(peer.data_patch.epoch)\n\n\ndef edge(peer: pd.Peer) -> str:\n if peer.data_infection is None:\n return \"\"\n\n return(\n '\\t' + 'edge [' + '\\n' +\n '\\t' + '\\t' + 'source {}'.format(peer.data_infection.source) + '\\n' +\n '\\t' + '\\t' + 'target {}'.format(peer.data_infection.target) + '\\n' +\n '\\t' + '\\t' + 'label \"{}\"'.format(peer.data_infection.epoch) + '\\n' +\n '\\t' + ']' + '\\n'\n )\n",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
#!/usr/bin/env python
import sys,re
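# Read stdin and print every http/https URL it contains, one per line.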
print('\n'.join(re.findall(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',sys.stdin.read())))
|
normal
|
{
"blob_id": "4cefaa964251e77a05066af1f61f9fd2a4350d38",
"index": 7622,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('\\n'.join(re.findall(\n 'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\\\(\\\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'\n , sys.stdin.read())))\n",
"step-3": "import sys, re\nprint('\\n'.join(re.findall(\n 'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\\\(\\\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'\n , sys.stdin.read())))\n",
"step-4": "#!/usr/bin/env python\nimport sys,re\nprint('\\n'.join(re.findall(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',sys.stdin.read())))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from more_itertools import ilen
from my.body import weight, shower, food, water
def test_body() ->None:
for func in (weight, shower, food, water):
assert ilen(func()) >= 1
|
normal
|
{
"blob_id": "e06b740f27e41b9f120c962fd76a38a29d54af3c",
"index": 973,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_body() ->None:\n for func in (weight, shower, food, water):\n assert ilen(func()) >= 1\n",
"step-3": "from more_itertools import ilen\nfrom my.body import weight, shower, food, water\n\n\ndef test_body() ->None:\n for func in (weight, shower, food, water):\n assert ilen(func()) >= 1\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from setuptools import setup, find_packages
from setuptools.extension import Extension
from sys import platform
cython = True
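# Prefer compiling the Cython .pyx source when Cython is installed;
# otherwise fall back to the pre-generated sent2vec.cpp below.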
try:
from Cython.Build import cythonize
cython = True
except ImportError:
cython = False
# Define the C++ extension
if platform == "darwin":
extra_compile_args = ['-O3', '-pthread', '-funroll-loops', '-std=c++0x', '-stdlib=libc++', '-mmacosx-version-min=10.7']
else:
extra_compile_args = ['-O3', '-pthread', '-funroll-loops', '-std=c++0x']
extensions = []
if cython:
extensions = [
Extension('sent2vec',
sources=[
'sent2vec/sent2vec.pyx',
'sent2vec/cpp/src/args.cc',
'sent2vec/cpp/src/dictionary.cc',
'sent2vec/cpp/src/fasttext.cc',
'sent2vec/cpp/src/main.cc',
'sent2vec/cpp/src/matrix.cc',
'sent2vec/cpp/src/model.cc',
'sent2vec/cpp/src/productquantizer.cc',
'sent2vec/cpp/src/qmatrix.cc',
'sent2vec/cpp/src/utils.cc',
'sent2vec/cpp/src/vector.cc'
],
language='c++',
extra_compile_args=extra_compile_args
)
]
extensions = cythonize(extensions)
else:
extensions = [
Extension('sent2vec',
sources=[
'sent2vec/sent2vec.cpp',
'sent2vec/cpp/src/args.cc',
'sent2vec/cpp/src/dictionary.cc',
'sent2vec/cpp/src/fasttext.cc',
'sent2vec/cpp/src/main.cc',
'sent2vec/cpp/src/matrix.cc',
'sent2vec/cpp/src/model.cc',
'sent2vec/cpp/src/productquantizer.cc',
'sent2vec/cpp/src/qmatrix.cc',
'sent2vec/cpp/src/utils.cc',
'sent2vec/cpp/src/vector.cc'
],
language='c++',
extra_compile_args=extra_compile_args
)
]
# Package details
setup(
name='sent2vec',
version='0.1.0',
author='',
author_email='',
url='',
description='A Python interface for sent2vec library',
license='BSD 3-Clause License',
packages=['sent2vec'],
ext_modules = extensions,
install_requires=[],
classifiers= []
)
|
normal
|
{
"blob_id": "312cc666c88fcd22882c49598db8c5e18bd3dae1",
"index": 26,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntry:\n from Cython.Build import cythonize\n cython = True\nexcept ImportError:\n cython = False\nif platform == 'darwin':\n extra_compile_args = ['-O3', '-pthread', '-funroll-loops', '-std=c++0x',\n '-stdlib=libc++', '-mmacosx-version-min=10.7']\nelse:\n extra_compile_args = ['-O3', '-pthread', '-funroll-loops', '-std=c++0x']\n<mask token>\nif cython:\n extensions = [Extension('sent2vec', sources=['sent2vec/sent2vec.pyx',\n 'sent2vec/cpp/src/args.cc', 'sent2vec/cpp/src/dictionary.cc',\n 'sent2vec/cpp/src/fasttext.cc', 'sent2vec/cpp/src/main.cc',\n 'sent2vec/cpp/src/matrix.cc', 'sent2vec/cpp/src/model.cc',\n 'sent2vec/cpp/src/productquantizer.cc',\n 'sent2vec/cpp/src/qmatrix.cc', 'sent2vec/cpp/src/utils.cc',\n 'sent2vec/cpp/src/vector.cc'], language='c++', extra_compile_args=\n extra_compile_args)]\n extensions = cythonize(extensions)\nelse:\n extensions = [Extension('sent2vec', sources=['sent2vec/sent2vec.cpp',\n 'sent2vec/cpp/src/args.cc', 'sent2vec/cpp/src/dictionary.cc',\n 'sent2vec/cpp/src/fasttext.cc', 'sent2vec/cpp/src/main.cc',\n 'sent2vec/cpp/src/matrix.cc', 'sent2vec/cpp/src/model.cc',\n 'sent2vec/cpp/src/productquantizer.cc',\n 'sent2vec/cpp/src/qmatrix.cc', 'sent2vec/cpp/src/utils.cc',\n 'sent2vec/cpp/src/vector.cc'], language='c++', extra_compile_args=\n extra_compile_args)]\nsetup(name='sent2vec', version='0.1.0', author='', author_email='', url='',\n description='A Python interface for sent2vec library', license=\n 'BSD 3-Clause License', packages=['sent2vec'], ext_modules=extensions,\n install_requires=[], classifiers=[])\n",
"step-3": "<mask token>\ncython = True\ntry:\n from Cython.Build import cythonize\n cython = True\nexcept ImportError:\n cython = False\nif platform == 'darwin':\n extra_compile_args = ['-O3', '-pthread', '-funroll-loops', '-std=c++0x',\n '-stdlib=libc++', '-mmacosx-version-min=10.7']\nelse:\n extra_compile_args = ['-O3', '-pthread', '-funroll-loops', '-std=c++0x']\nextensions = []\nif cython:\n extensions = [Extension('sent2vec', sources=['sent2vec/sent2vec.pyx',\n 'sent2vec/cpp/src/args.cc', 'sent2vec/cpp/src/dictionary.cc',\n 'sent2vec/cpp/src/fasttext.cc', 'sent2vec/cpp/src/main.cc',\n 'sent2vec/cpp/src/matrix.cc', 'sent2vec/cpp/src/model.cc',\n 'sent2vec/cpp/src/productquantizer.cc',\n 'sent2vec/cpp/src/qmatrix.cc', 'sent2vec/cpp/src/utils.cc',\n 'sent2vec/cpp/src/vector.cc'], language='c++', extra_compile_args=\n extra_compile_args)]\n extensions = cythonize(extensions)\nelse:\n extensions = [Extension('sent2vec', sources=['sent2vec/sent2vec.cpp',\n 'sent2vec/cpp/src/args.cc', 'sent2vec/cpp/src/dictionary.cc',\n 'sent2vec/cpp/src/fasttext.cc', 'sent2vec/cpp/src/main.cc',\n 'sent2vec/cpp/src/matrix.cc', 'sent2vec/cpp/src/model.cc',\n 'sent2vec/cpp/src/productquantizer.cc',\n 'sent2vec/cpp/src/qmatrix.cc', 'sent2vec/cpp/src/utils.cc',\n 'sent2vec/cpp/src/vector.cc'], language='c++', extra_compile_args=\n extra_compile_args)]\nsetup(name='sent2vec', version='0.1.0', author='', author_email='', url='',\n description='A Python interface for sent2vec library', license=\n 'BSD 3-Clause License', packages=['sent2vec'], ext_modules=extensions,\n install_requires=[], classifiers=[])\n",
"step-4": "from setuptools import setup, find_packages\nfrom setuptools.extension import Extension\nfrom sys import platform\ncython = True\ntry:\n from Cython.Build import cythonize\n cython = True\nexcept ImportError:\n cython = False\nif platform == 'darwin':\n extra_compile_args = ['-O3', '-pthread', '-funroll-loops', '-std=c++0x',\n '-stdlib=libc++', '-mmacosx-version-min=10.7']\nelse:\n extra_compile_args = ['-O3', '-pthread', '-funroll-loops', '-std=c++0x']\nextensions = []\nif cython:\n extensions = [Extension('sent2vec', sources=['sent2vec/sent2vec.pyx',\n 'sent2vec/cpp/src/args.cc', 'sent2vec/cpp/src/dictionary.cc',\n 'sent2vec/cpp/src/fasttext.cc', 'sent2vec/cpp/src/main.cc',\n 'sent2vec/cpp/src/matrix.cc', 'sent2vec/cpp/src/model.cc',\n 'sent2vec/cpp/src/productquantizer.cc',\n 'sent2vec/cpp/src/qmatrix.cc', 'sent2vec/cpp/src/utils.cc',\n 'sent2vec/cpp/src/vector.cc'], language='c++', extra_compile_args=\n extra_compile_args)]\n extensions = cythonize(extensions)\nelse:\n extensions = [Extension('sent2vec', sources=['sent2vec/sent2vec.cpp',\n 'sent2vec/cpp/src/args.cc', 'sent2vec/cpp/src/dictionary.cc',\n 'sent2vec/cpp/src/fasttext.cc', 'sent2vec/cpp/src/main.cc',\n 'sent2vec/cpp/src/matrix.cc', 'sent2vec/cpp/src/model.cc',\n 'sent2vec/cpp/src/productquantizer.cc',\n 'sent2vec/cpp/src/qmatrix.cc', 'sent2vec/cpp/src/utils.cc',\n 'sent2vec/cpp/src/vector.cc'], language='c++', extra_compile_args=\n extra_compile_args)]\nsetup(name='sent2vec', version='0.1.0', author='', author_email='', url='',\n description='A Python interface for sent2vec library', license=\n 'BSD 3-Clause License', packages=['sent2vec'], ext_modules=extensions,\n install_requires=[], classifiers=[])\n",
"step-5": "from setuptools import setup, find_packages\nfrom setuptools.extension import Extension\nfrom sys import platform\n\ncython = True\n\ntry:\n from Cython.Build import cythonize\n cython = True\nexcept ImportError:\n cython = False\n\n# Define the C++ extension\nif platform == \"darwin\":\n extra_compile_args = ['-O3', '-pthread', '-funroll-loops', '-std=c++0x', '-stdlib=libc++', '-mmacosx-version-min=10.7']\nelse:\n extra_compile_args = ['-O3', '-pthread', '-funroll-loops', '-std=c++0x']\n\nextensions = []\n\nif cython:\n extensions = [\n Extension('sent2vec',\n sources=[\n 'sent2vec/sent2vec.pyx',\n 'sent2vec/cpp/src/args.cc',\n 'sent2vec/cpp/src/dictionary.cc',\n 'sent2vec/cpp/src/fasttext.cc',\n 'sent2vec/cpp/src/main.cc',\n 'sent2vec/cpp/src/matrix.cc',\n 'sent2vec/cpp/src/model.cc',\n 'sent2vec/cpp/src/productquantizer.cc',\n 'sent2vec/cpp/src/qmatrix.cc',\n 'sent2vec/cpp/src/utils.cc',\n 'sent2vec/cpp/src/vector.cc'\n ],\n language='c++',\n extra_compile_args=extra_compile_args\n )\n ]\n\n extensions = cythonize(extensions)\nelse:\n extensions = [\n Extension('sent2vec',\n sources=[\n 'sent2vec/sent2vec.cpp',\n 'sent2vec/cpp/src/args.cc',\n 'sent2vec/cpp/src/dictionary.cc',\n 'sent2vec/cpp/src/fasttext.cc',\n 'sent2vec/cpp/src/main.cc',\n 'sent2vec/cpp/src/matrix.cc',\n 'sent2vec/cpp/src/model.cc',\n 'sent2vec/cpp/src/productquantizer.cc',\n 'sent2vec/cpp/src/qmatrix.cc',\n 'sent2vec/cpp/src/utils.cc',\n 'sent2vec/cpp/src/vector.cc'\n ],\n language='c++',\n extra_compile_args=extra_compile_args\n )\n ]\n\n# Package details\nsetup(\n name='sent2vec',\n version='0.1.0',\n author='',\n author_email='',\n url='',\n description='A Python interface for sent2vec library',\n license='BSD 3-Clause License',\n packages=['sent2vec'],\n ext_modules = extensions,\n install_requires=[],\n classifiers= []\n)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
cursor.executemany('INSERT INTO persone VALUES (null,?,?)', dipendenti)
conn.commit()
conn.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
conn = sqlite3.connect('19-BD/prove.db')
cursor = conn.cursor()
dipendenti = [('Sofia', 'commessa'), ('Diego', 'tecnico'), ('Lucia',
'cassiera'), ('Luca', 'Magazziniere'), ('Pablo', 'Capo reparto')]
cursor.executemany('INSERT INTO persone VALUES (null,?,?)', dipendenti)
conn.commit()
conn.close()
<|reserved_special_token_1|>
import sqlite3
conn = sqlite3.connect('19-BD/prove.db')
cursor = conn.cursor()
dipendenti = [('Sofia', 'commessa'), ('Diego', 'tecnico'), ('Lucia',
'cassiera'), ('Luca', 'Magazziniere'), ('Pablo', 'Capo reparto')]
cursor.executemany('INSERT INTO persone VALUES (null,?,?)', dipendenti)
conn.commit()
conn.close()
<|reserved_special_token_1|>
import sqlite3
conn = sqlite3.connect("19-BD/prove.db")
cursor = conn.cursor()
dipendenti = [
("Sofia","commessa"),
("Diego","tecnico"),
("Lucia","cassiera"),
("Luca","Magazziniere"),
("Pablo","Capo reparto")
]
cursor.executemany("INSERT INTO persone VALUES (null,?,?)", dipendenti)
conn.commit()
conn.close()
|
flexible
|
{
"blob_id": "3e1ca6ed4668e75a62baa65ef44346dd86a16491",
"index": 3093,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncursor.executemany('INSERT INTO persone VALUES (null,?,?)', dipendenti)\nconn.commit()\nconn.close()\n",
"step-3": "<mask token>\nconn = sqlite3.connect('19-BD/prove.db')\ncursor = conn.cursor()\ndipendenti = [('Sofia', 'commessa'), ('Diego', 'tecnico'), ('Lucia',\n 'cassiera'), ('Luca', 'Magazziniere'), ('Pablo', 'Capo reparto')]\ncursor.executemany('INSERT INTO persone VALUES (null,?,?)', dipendenti)\nconn.commit()\nconn.close()\n",
"step-4": "import sqlite3\nconn = sqlite3.connect('19-BD/prove.db')\ncursor = conn.cursor()\ndipendenti = [('Sofia', 'commessa'), ('Diego', 'tecnico'), ('Lucia',\n 'cassiera'), ('Luca', 'Magazziniere'), ('Pablo', 'Capo reparto')]\ncursor.executemany('INSERT INTO persone VALUES (null,?,?)', dipendenti)\nconn.commit()\nconn.close()\n",
"step-5": "import sqlite3\n\nconn = sqlite3.connect(\"19-BD/prove.db\")\ncursor = conn.cursor()\n\ndipendenti = [\n (\"Sofia\",\"commessa\"),\n (\"Diego\",\"tecnico\"),\n (\"Lucia\",\"cassiera\"),\n (\"Luca\",\"Magazziniere\"),\n (\"Pablo\",\"Capo reparto\")\n]\n\ncursor.executemany(\"INSERT INTO persone VALUES (null,?,?)\", dipendenti)\nconn.commit()\n\nconn.close()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
# import the required packages
import matplotlib.pyplot as plt
import numpy as np
# render Chinese characters and the minus sign correctly
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
# x-axis and y-axis data; box office in units of 100 million yuan
a = ["战狼2","速度与激情8","功夫瑜伽","西游伏妖篇","变形金刚5:最后的骑士","摔跤吧!爸爸","加勒比海盗5:死无对证","金刚:骷髅岛","极限特工:终极回归","生化危机6:终章","乘风破浪","神偷奶爸3","智取威虎山","大闹天竺","金刚狼3:殊死一战","蜘蛛侠:英雄归来","悟空传","银河护卫队2","情圣","新木乃伊",]
b = [56.01,26.94,17.53,16.49,15.45,12.96,11.8,11.61,11.28,11.12,10.49,10.3,8.75,7.55,7.32,6.99,6.88,6.86,6.58,6.23]
# set the figure size
plt.figure(figsize=(20, 8), dpi=128)
# draw a horizontal bar chart; the first argument is an iterable, usually a list
# (a vertical bar chart would use width instead of height to set the bar size)
plt.barh(a, b, height=0.5, color='red')
# set the chart title and x-axis label
plt.title("2018年电影票房纪录", fontsize=24)
plt.xlabel("票房(亿元)", fontsize=14)
# place x-axis ticks every 5 units (np.arange gives the tick positions)
my_x_ticks = np.arange(0, 61, 5)
plt.xticks(my_x_ticks)
# draw the grid
plt.grid(axis='both', color='grey', linestyle='-.', alpha=0.5)
# show the figure
plt.show()
|
normal
|
{
"blob_id": "16d86c48c45ab0441046e968ea364d27f6dcfd12",
"index": 3066,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplt.figure(figsize=(20, 8), dpi=128)\nplt.barh(a, b, height=0.5, color='red')\nplt.title('2018年电影票房纪录', fontsize=24)\nplt.xlabel('票房(亿元)', fontsize=14)\n<mask token>\nplt.xticks(my_x_ticks)\nplt.grid(axis='both', color='grey', linestyle='-.', alpha=0.5)\nplt.show()\n",
"step-3": "<mask token>\nplt.rcParams['font.sans-serif'] = ['SimHei']\nplt.rcParams['axes.unicode_minus'] = False\na = ['战狼2', '速度与激情8', '功夫瑜伽', '西游伏妖篇', '变形金刚5:最后的骑士', '摔跤吧!爸爸',\n '加勒比海盗5:死无对证', '金刚:骷髅岛', '极限特工:终极回归', '生化危机6:终章', '乘风破浪', '神偷奶爸3',\n '智取威虎山', '大闹天竺', '金刚狼3:殊死一战', '蜘蛛侠:英雄归来', '悟空传', '银河护卫队2', '情圣', '新木乃伊']\nb = [56.01, 26.94, 17.53, 16.49, 15.45, 12.96, 11.8, 11.61, 11.28, 11.12, \n 10.49, 10.3, 8.75, 7.55, 7.32, 6.99, 6.88, 6.86, 6.58, 6.23]\nplt.figure(figsize=(20, 8), dpi=128)\nplt.barh(a, b, height=0.5, color='red')\nplt.title('2018年电影票房纪录', fontsize=24)\nplt.xlabel('票房(亿元)', fontsize=14)\nmy_x_ticks = np.arange(0, 61, 5)\nplt.xticks(my_x_ticks)\nplt.grid(axis='both', color='grey', linestyle='-.', alpha=0.5)\nplt.show()\n",
"step-4": "import matplotlib.pyplot as plt\nimport numpy as np\nplt.rcParams['font.sans-serif'] = ['SimHei']\nplt.rcParams['axes.unicode_minus'] = False\na = ['战狼2', '速度与激情8', '功夫瑜伽', '西游伏妖篇', '变形金刚5:最后的骑士', '摔跤吧!爸爸',\n '加勒比海盗5:死无对证', '金刚:骷髅岛', '极限特工:终极回归', '生化危机6:终章', '乘风破浪', '神偷奶爸3',\n '智取威虎山', '大闹天竺', '金刚狼3:殊死一战', '蜘蛛侠:英雄归来', '悟空传', '银河护卫队2', '情圣', '新木乃伊']\nb = [56.01, 26.94, 17.53, 16.49, 15.45, 12.96, 11.8, 11.61, 11.28, 11.12, \n 10.49, 10.3, 8.75, 7.55, 7.32, 6.99, 6.88, 6.86, 6.58, 6.23]\nplt.figure(figsize=(20, 8), dpi=128)\nplt.barh(a, b, height=0.5, color='red')\nplt.title('2018年电影票房纪录', fontsize=24)\nplt.xlabel('票房(亿元)', fontsize=14)\nmy_x_ticks = np.arange(0, 61, 5)\nplt.xticks(my_x_ticks)\nplt.grid(axis='both', color='grey', linestyle='-.', alpha=0.5)\nplt.show()\n",
"step-5": "# -*- coding: utf-8 -*-\n\n# 导入包\nimport matplotlib.pyplot as plt\nimport numpy as np\n\n# 显示中文和显示负号\nplt.rcParams['font.sans-serif'] = ['SimHei']\nplt.rcParams['axes.unicode_minus'] = False\n\n# X轴和Y轴数据,票房单位亿\na = [\"战狼2\",\"速度与激情8\",\"功夫瑜伽\",\"西游伏妖篇\",\"变形金刚5:最后的骑士\",\"摔跤吧!爸爸\",\"加勒比海盗5:死无对证\",\"金刚:骷髅岛\",\"极限特工:终极回归\",\"生化危机6:终章\",\"乘风破浪\",\"神偷奶爸3\",\"智取威虎山\",\"大闹天竺\",\"金刚狼3:殊死一战\",\"蜘蛛侠:英雄归来\",\"悟空传\",\"银河护卫队2\",\"情圣\",\"新木乃伊\",]\nb = [56.01,26.94,17.53,16.49,15.45,12.96,11.8,11.61,11.28,11.12,10.49,10.3,8.75,7.55,7.32,6.99,6.88,6.86,6.58,6.23]\n\n# 设置图形的大小\nplt.figure(figsize=(20, 8), dpi=128)\n\n# 绘制横置条形图,x轴参数是一个可迭代对象,一般为列表\n# 竖直条形图,用的是width设置宽度\nplt.barh(a, b, height=0.5, color='red')\n\n# 设置图片,X轴,Y轴标题\nplt.title(\"2018年电影票房纪录\", fontsize=24)\nplt.xlabel(\"票房(亿元)\", fontsize=14)\n\n# 设置坐标轴刻度,刻度间隔,range不能设置步长\nmy_x_ticks = np.arange(0, 61, 5)\nplt.xticks(my_x_ticks)\n\n# 设置网格\nplt.grid(axis='both', color='grey', linestyle='-.', alpha=0.5)\n\n# 显示图形\nplt.show()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
import sys
import string
from array import *
from datetime import datetime
#f = open('input_test.txt', 'r')
f = open('input_task.txt', 'r')
width = 60
height = 5000
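# sleepingMinutes[guard][minute] counts how often each guard was asleep during a given minute (0-59)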
sleepingMinutes = [[0 for x in range(width)] for y in range(height)]
infos = []
# Change lines to tuples and store to array for sorting
for line in f:
line = line.rstrip('\n')
line = line.replace('[','')
splitted = line.split(']')
stringTime = splitted[0]
stringTask = splitted[1]
datetimeTime = datetime.strptime(stringTime, '%Y-%m-%d %H:%M')
lineTuple = (datetimeTime, stringTask)
infos.append(lineTuple)
#print(datetimeTime.minute)
# sort the info we have
infosSorted = sorted(infos, key=lambda time: time[0])
#print(infos)
#print(infosSorted)
sleeping = False
for dataPoint in infosSorted:
splitted = dataPoint[1].split(' ')
#print(splitted)
if splitted[1] == 'Guard':
#print('Vartija vaihtui, vuorossa: ' + splitted[2])
guard = splitted[2].replace('#','')
if splitted[1] == 'falls':
sleeping = True
sleepingTimeStart = dataPoint[0]
#print('vartija ' + guard + ' nukahti hetkellä ' + str(sleepingTimeStart))
if splitted[1] == 'wakes':
sleeping = False
sleepingTimeStop = dataPoint[0]
sleepingTime = sleepingTimeStop - sleepingTimeStart
#print('vartija ' + guard + ' heräsi hetkellä ' + str(sleepingTimeStop) + ' nukkuen ' + str(sleepingTime))
for x in range(sleepingTimeStart.minute, sleepingTimeStop.minute):
sleepingMinutes[int(guard)][x] += 1
maxVartija = 0
maxMinuutti = 0
maxMinuutit = 0
vartija = 0
for x in sleepingMinutes:
summa = sum(x)
minuutti = x.index(max(x))
#print(x)
#print('yhteensä ' + str(summa) + ' nukkui eniten minuutilla ' + str(maxMinuutti))
    if maxMinuutit < summa:  # compare against the best total so far, not the guard id
maxVartija = vartija
maxMinuutti = minuutti
maxMinuutit = summa
vartija += 1
print('Eniten nukkui vartija #' + str(maxVartija) + ' nukkuen yhteensä ' + str(maxMinuutit) + ' minuuttia ja eniten minuutilla ' + str(maxMinuutti))
print('Vastaus on siis ' + str(maxVartija*maxMinuutti))
|
normal
|
{
"blob_id": "293533d07b530be9e8f97f1720619bf6c3113cca",
"index": 9447,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in f:\n line = line.rstrip('\\n')\n line = line.replace('[', '')\n splitted = line.split(']')\n stringTime = splitted[0]\n stringTask = splitted[1]\n datetimeTime = datetime.strptime(stringTime, '%Y-%m-%d %H:%M')\n lineTuple = datetimeTime, stringTask\n infos.append(lineTuple)\n<mask token>\nfor dataPoint in infosSorted:\n splitted = dataPoint[1].split(' ')\n if splitted[1] == 'Guard':\n guard = splitted[2].replace('#', '')\n if splitted[1] == 'falls':\n sleeping = True\n sleepingTimeStart = dataPoint[0]\n if splitted[1] == 'wakes':\n sleeping = False\n sleepingTimeStop = dataPoint[0]\n sleepingTime = sleepingTimeStop - sleepingTimeStart\n for x in range(sleepingTimeStart.minute, sleepingTimeStop.minute):\n sleepingMinutes[int(guard)][x] += 1\n<mask token>\nfor x in sleepingMinutes:\n summa = sum(x)\n minuutti = x.index(max(x))\n if maxVartija < summa:\n maxVartija = vartija\n maxMinuutti = minuutti\n maxMinuutit = summa\n vartija += 1\nprint('Eniten nukkui vartija #' + str(maxVartija) + ' nukkuen yhteensä ' +\n str(maxMinuutit) + ' minuuttia ja eniten minuutilla ' + str(maxMinuutti))\nprint('Vastaus on siis ' + str(maxVartija * maxMinuutti))\n",
"step-3": "<mask token>\nf = open('input_task.txt', 'r')\nwidth = 60\nheight = 5000\nsleepingMinutes = [[(0) for x in range(width)] for y in range(height)]\ninfos = []\nfor line in f:\n line = line.rstrip('\\n')\n line = line.replace('[', '')\n splitted = line.split(']')\n stringTime = splitted[0]\n stringTask = splitted[1]\n datetimeTime = datetime.strptime(stringTime, '%Y-%m-%d %H:%M')\n lineTuple = datetimeTime, stringTask\n infos.append(lineTuple)\ninfosSorted = sorted(infos, key=lambda time: time[0])\nsleeping = False\nfor dataPoint in infosSorted:\n splitted = dataPoint[1].split(' ')\n if splitted[1] == 'Guard':\n guard = splitted[2].replace('#', '')\n if splitted[1] == 'falls':\n sleeping = True\n sleepingTimeStart = dataPoint[0]\n if splitted[1] == 'wakes':\n sleeping = False\n sleepingTimeStop = dataPoint[0]\n sleepingTime = sleepingTimeStop - sleepingTimeStart\n for x in range(sleepingTimeStart.minute, sleepingTimeStop.minute):\n sleepingMinutes[int(guard)][x] += 1\nmaxVartija = 0\nmaxMinuutti = 0\nmaxMinuutit = 0\nvartija = 0\nfor x in sleepingMinutes:\n summa = sum(x)\n minuutti = x.index(max(x))\n if maxVartija < summa:\n maxVartija = vartija\n maxMinuutti = minuutti\n maxMinuutit = summa\n vartija += 1\nprint('Eniten nukkui vartija #' + str(maxVartija) + ' nukkuen yhteensä ' +\n str(maxMinuutit) + ' minuuttia ja eniten minuutilla ' + str(maxMinuutti))\nprint('Vastaus on siis ' + str(maxVartija * maxMinuutti))\n",
"step-4": "import os\nimport sys\nimport string\nfrom array import *\nfrom datetime import datetime\nf = open('input_task.txt', 'r')\nwidth = 60\nheight = 5000\nsleepingMinutes = [[(0) for x in range(width)] for y in range(height)]\ninfos = []\nfor line in f:\n line = line.rstrip('\\n')\n line = line.replace('[', '')\n splitted = line.split(']')\n stringTime = splitted[0]\n stringTask = splitted[1]\n datetimeTime = datetime.strptime(stringTime, '%Y-%m-%d %H:%M')\n lineTuple = datetimeTime, stringTask\n infos.append(lineTuple)\ninfosSorted = sorted(infos, key=lambda time: time[0])\nsleeping = False\nfor dataPoint in infosSorted:\n splitted = dataPoint[1].split(' ')\n if splitted[1] == 'Guard':\n guard = splitted[2].replace('#', '')\n if splitted[1] == 'falls':\n sleeping = True\n sleepingTimeStart = dataPoint[0]\n if splitted[1] == 'wakes':\n sleeping = False\n sleepingTimeStop = dataPoint[0]\n sleepingTime = sleepingTimeStop - sleepingTimeStart\n for x in range(sleepingTimeStart.minute, sleepingTimeStop.minute):\n sleepingMinutes[int(guard)][x] += 1\nmaxVartija = 0\nmaxMinuutti = 0\nmaxMinuutit = 0\nvartija = 0\nfor x in sleepingMinutes:\n summa = sum(x)\n minuutti = x.index(max(x))\n if maxVartija < summa:\n maxVartija = vartija\n maxMinuutti = minuutti\n maxMinuutit = summa\n vartija += 1\nprint('Eniten nukkui vartija #' + str(maxVartija) + ' nukkuen yhteensä ' +\n str(maxMinuutit) + ' minuuttia ja eniten minuutilla ' + str(maxMinuutti))\nprint('Vastaus on siis ' + str(maxVartija * maxMinuutti))\n",
"step-5": "import os\nimport sys\nimport string\nfrom array import *\nfrom datetime import datetime\n\n#f = open('input_test.txt', 'r')\nf = open('input_task.txt', 'r')\n\nwidth = 60\nheight = 5000\nsleepingMinutes = [[0 for x in range(width)] for y in range(height)]\n\ninfos = []\n\n# Change lines to tuples and store to array for sorting\nfor line in f:\n line = line.rstrip('\\n')\n line = line.replace('[','')\n splitted = line.split(']')\n stringTime = splitted[0]\n stringTask = splitted[1]\n datetimeTime = datetime.strptime(stringTime, '%Y-%m-%d %H:%M')\n lineTuple = (datetimeTime, stringTask)\n infos.append(lineTuple)\n #print(datetimeTime.minute)\n\n# sort the info we have\ninfosSorted = sorted(infos, key=lambda time: time[0])\n#print(infos)\n#print(infosSorted)\n\nsleeping = False\n\nfor dataPoint in infosSorted:\n splitted = dataPoint[1].split(' ')\n #print(splitted)\n if splitted[1] == 'Guard':\n #print('Vartija vaihtui, vuorossa: ' + splitted[2])\n guard = splitted[2].replace('#','')\n if splitted[1] == 'falls':\n sleeping = True\n sleepingTimeStart = dataPoint[0]\n #print('vartija ' + guard + ' nukahti hetkellä ' + str(sleepingTimeStart))\n if splitted[1] == 'wakes':\n sleeping = False\n sleepingTimeStop = dataPoint[0]\n sleepingTime = sleepingTimeStop - sleepingTimeStart\n #print('vartija ' + guard + ' heräsi hetkellä ' + str(sleepingTimeStop) + ' nukkuen ' + str(sleepingTime))\n for x in range(sleepingTimeStart.minute, sleepingTimeStop.minute):\n sleepingMinutes[int(guard)][x] += 1\n\nmaxVartija = 0\nmaxMinuutti = 0\nmaxMinuutit = 0\nvartija = 0\n\nfor x in sleepingMinutes:\n summa = sum(x)\n minuutti = x.index(max(x))\n #print(x)\n #print('yhteensä ' + str(summa) + ' nukkui eniten minuutilla ' + str(maxMinuutti))\n if maxVartija < summa:\n maxVartija = vartija\n maxMinuutti = minuutti\n maxMinuutit = summa\n vartija += 1\n\nprint('Eniten nukkui vartija #' + str(maxVartija) + ' nukkuen yhteensä ' + str(maxMinuutit) + ' minuuttia ja eniten minuutilla ' + str(maxMinuutti))\nprint('Vastaus on siis ' + str(maxVartija*maxMinuutti))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Project Euler problem 2: sum of the even Fibonacci numbers below four million.
def fib(limit):
    a, b = 0, 1
    yield a
    yield b
    while b < limit:
        a, b = b, a + b
        yield b

print(sum(x for x in fib(4000000) if not x % 2))  # 4613732
|
normal
|
{
"blob_id": "1c7635917e398c30e4a232f76b2c02a51e165a63",
"index": 4147,
"step-1": "def fib(limit):\n a, b = 0, 1\n yield a\n yield b\n while b < limit:\n a, b = b, a + b\n yield b\n\n\nprint sum(x for x in fib(4000000) if not x % 2) # 4613732\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
def get_cruiseitemArr():
conn = db_connect.connect()
query = conn.execute('select * from CruiseItem')
InventoryArr = query.cursor.fetchall()
print(InventoryArr)
return jsonify(InventoryArr)
def get_cruiseitemArr_byLoc(Location):
conn = db_connect.connect()
query = conn.execute(
"select * from Cruiseitem where fromLocation ='%s'" % str(Location))
InventoryArr = query.cursor.fetchall()
print(InventoryArr)
    return jsonify(InventoryArr)
<|reserved_special_token_0|>
@app.route('/inventory', methods=['GET'])
def get_cruiseitems():
return jsonify(status='ok', InventoryArr=get_cruiseitemArr())
@app.route('/inventory/location/<Location>', methods=['GET'])
def get_cruiseitems_by_location(Location):
return jsonify(status='ok', InventoryArr=get_cruiseitemArr_byLoc(Location))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_cruiseitemArr():
conn = db_connect.connect()
query = conn.execute('select * from CruiseItem')
InventoryArr = query.cursor.fetchall()
print(InventoryArr)
return jsonify(InventoryArr)
def get_cruiseitemArr_byLoc(Location):
conn = db_connect.connect()
query = conn.execute(
"select * from Cruiseitem where fromLocation ='%s'" % str(Location))
InventoryArr = query.cursor.fetchall()
print(InventoryArr)
    return jsonify(InventoryArr)
def get_cruiseHistory():
conn = db_connect.connect()
query = conn.execute('select * from cruiseHistory')
HistoryArr = query.cursor.fetchall()
print(HistoryArr)
@app.route('/inventory', methods=['GET'])
def get_cruiseitems():
return jsonify(status='ok', InventoryArr=get_cruiseitemArr())
@app.route('/inventory/location/<Location>', methods=['GET'])
def get_cruiseitems_by_location(Location):
return jsonify(status='ok', InventoryArr=get_cruiseitemArr_byLoc(Location))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app.app_context()
<|reserved_special_token_0|>
def get_cruiseitemArr():
conn = db_connect.connect()
query = conn.execute('select * from CruiseItem')
InventoryArr = query.cursor.fetchall()
print(InventoryArr)
return jsonify(InventoryArr)
def get_cruiseitemArr_byLoc(Location):
conn = db_connect.connect()
query = conn.execute(
"select * from Cruiseitem where fromLocation ='%s'" % str(Location))
InventoryArr = query.cursor.fetchall()
print(InventoryArr)
    return jsonify(InventoryArr)
def get_cruiseHistory():
conn = db_connect.connect()
query = conn.execute('select * from cruiseHistory')
HistoryArr = query.cursor.fetchall()
print(HistoryArr)
@app.route('/inventory', methods=['GET'])
def get_cruiseitems():
return jsonify(status='ok', InventoryArr=get_cruiseitemArr())
@app.route('/inventory/location/<Location>', methods=['GET'])
def get_cruiseitems_by_location(Location):
return jsonify(status='ok', InventoryArr=get_cruiseitemArr_byLoc(Location))
if __name__ == '__main__':
app.run('0.0.0.0', 80)
<|reserved_special_token_1|>
from flask import Flask, jsonify, abort, request
from cruiseItem import cruiseItem
from sqlalchemy import create_engine
from json import dumps
db_connect = create_engine('sqlite:///Carnivorecruise.sqlite')
app = Flask(__name__)
app.json_encoder.default = lambda self, o: o.to_json()
app.app_context()
InventoryArr = {}
HistoryArr = {}
def get_cruiseitemArr():
conn = db_connect.connect()
query = conn.execute('select * from CruiseItem')
InventoryArr = query.cursor.fetchall()
print(InventoryArr)
return jsonify(InventoryArr)
def get_cruiseitemArr_byLoc(Location):
conn = db_connect.connect()
query = conn.execute(
"select * from Cruiseitem where fromLocation ='%s'" % str(Location))
InventoryArr = query.cursor.fetchall()
print(InventoryArr)
    return jsonify(InventoryArr)
def get_cruiseHistory():
conn = db_connect.connect()
query = conn.execute('select * from cruiseHistory')
HistoryArr = query.cursor.fetchall()
print(HistoryArr)
@app.route('/inventory', methods=['GET'])
def get_cruiseitems():
return jsonify(status='ok', InventoryArr=get_cruiseitemArr())
@app.route('/inventory/location/<Location>', methods=['GET'])
def get_cruiseitems_by_location(Location):
return jsonify(status='ok', InventoryArr=get_cruiseitemArr_byLoc(Location))
if __name__ == '__main__':
app.run('0.0.0.0', 80)
<|reserved_special_token_1|>
#basic API start
from flask import Flask, jsonify, abort, request
from cruiseItem import cruiseItem
from sqlalchemy import create_engine
from json import dumps
db_connect = create_engine('sqlite:///Carnivorecruise.sqlite')
app = Flask(__name__)
app.json_encoder.default = lambda self, o: o.to_json()
app.app_context()
# Array to store the objects
InventoryArr = {}
HistoryArr = {}
def get_cruiseitemArr():
conn = db_connect.connect() # connect to database
query = conn.execute("select * from CruiseItem") #Perform query for all CruiseItems in db
InventoryArr = query.cursor.fetchall()
print(InventoryArr)
return jsonify(InventoryArr)
def get_cruiseitemArr_byLoc(Location):
conn = db_connect.connect() #connect to database
query = conn.execute("select * from Cruiseitem where fromLocation ='%s'"%str(Location))
InventoryArr = query.cursor.fetchall()
print(InventoryArr)
    return jsonify(InventoryArr) #convert query result into a json
def get_cruiseHistory():
conn = db_connect.connect() # connect to database
query = conn.execute("select * from cruiseHistory")
HistoryArr = query.cursor.fetchall()
print(HistoryArr)
@app.route('/inventory', methods=['GET'])
def get_cruiseitems():
return jsonify(status="ok",InventoryArr=get_cruiseitemArr())
@app.route('/inventory/location/<Location>', methods=['GET'])
def get_cruiseitems_by_location(Location):
return jsonify(status="ok", InventoryArr=get_cruiseitemArr_byLoc(Location))
if __name__ == '__main__':
app.run("0.0.0.0", 80)
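
# --- added sketch (mine, not from the original file) ---
# The %-formatted query in get_cruiseitemArr_byLoc is open to SQL injection;
# binding the value through sqlalchemy.text() is one safer variant. The
# function name is illustrative, and in a real module this helper would sit
# above the __main__ guard:
from sqlalchemy import text

def get_cruiseitemArr_byLoc_safe(Location):
    conn = db_connect.connect()
    query = conn.execute(
        text("select * from CruiseItem where fromLocation = :loc"),
        {"loc": str(Location)})
    return jsonify(query.cursor.fetchall())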
|
flexible
|
{
"blob_id": "65bfb59a255b42854eec8b55b28711737cfc46c2",
"index": 9325,
"step-1": "<mask token>\n\n\ndef get_cruiseitemArr():\n conn = db_connect.connect()\n query = conn.execute('select * from CruiseItem')\n InventoryArr = query.cursor.fetchall()\n print(InventoryArr)\n return jsonify(InventoryArr)\n\n\ndef get_cruiseitemArr_byLoc(Location):\n conn = db_connect.connect()\n query = conn.execute(\n \"select * from Cruiseitem where fromLocation ='%s'\" % str(Location))\n InventoryArr = query.cursor.fetchall()\n print(InventoryArr)\n return jsonify(query)\n\n\n<mask token>\n\n\[email protected]('/inventory', methods=['GET'])\ndef get_cruiseitems():\n return jsonify(status='ok', InventoryArr=get_cruiseitemArr())\n\n\[email protected]('/inventory/location/<Location>', methods=['GET'])\ndef get_cruiseitems_by_location(Location):\n return jsonify(status='ok', InventoryArr=get_cruiseitemArr_byLoc(Location))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_cruiseitemArr():\n conn = db_connect.connect()\n query = conn.execute('select * from CruiseItem')\n InventoryArr = query.cursor.fetchall()\n print(InventoryArr)\n return jsonify(InventoryArr)\n\n\ndef get_cruiseitemArr_byLoc(Location):\n conn = db_connect.connect()\n query = conn.execute(\n \"select * from Cruiseitem where fromLocation ='%s'\" % str(Location))\n InventoryArr = query.cursor.fetchall()\n print(InventoryArr)\n return jsonify(query)\n\n\ndef get_cruiseHistory():\n conn = db_connect.connect()\n query = conn.execute('select * from cruiseHistory')\n HistoryArr = query.cursor.fetchall()\n print(HistoryArr)\n\n\[email protected]('/inventory', methods=['GET'])\ndef get_cruiseitems():\n return jsonify(status='ok', InventoryArr=get_cruiseitemArr())\n\n\[email protected]('/inventory/location/<Location>', methods=['GET'])\ndef get_cruiseitems_by_location(Location):\n return jsonify(status='ok', InventoryArr=get_cruiseitemArr_byLoc(Location))\n\n\n<mask token>\n",
"step-3": "<mask token>\napp.app_context()\n<mask token>\n\n\ndef get_cruiseitemArr():\n conn = db_connect.connect()\n query = conn.execute('select * from CruiseItem')\n InventoryArr = query.cursor.fetchall()\n print(InventoryArr)\n return jsonify(InventoryArr)\n\n\ndef get_cruiseitemArr_byLoc(Location):\n conn = db_connect.connect()\n query = conn.execute(\n \"select * from Cruiseitem where fromLocation ='%s'\" % str(Location))\n InventoryArr = query.cursor.fetchall()\n print(InventoryArr)\n return jsonify(query)\n\n\ndef get_cruiseHistory():\n conn = db_connect.connect()\n query = conn.execute('select * from cruiseHistory')\n HistoryArr = query.cursor.fetchall()\n print(HistoryArr)\n\n\[email protected]('/inventory', methods=['GET'])\ndef get_cruiseitems():\n return jsonify(status='ok', InventoryArr=get_cruiseitemArr())\n\n\[email protected]('/inventory/location/<Location>', methods=['GET'])\ndef get_cruiseitems_by_location(Location):\n return jsonify(status='ok', InventoryArr=get_cruiseitemArr_byLoc(Location))\n\n\nif __name__ == '__main__':\n app.run('0.0.0.0', 80)\n",
"step-4": "from flask import Flask, jsonify, abort, request\nfrom cruiseItem import cruiseItem\nfrom sqlalchemy import create_engine\nfrom json import dumps\ndb_connect = create_engine('sqlite:///Carnivorecruise.sqlite')\napp = Flask(__name__)\napp.json_encoder.default = lambda self, o: o.to_joson()\napp.app_context()\nInventoryArr = {}\nHistoryArr = {}\n\n\ndef get_cruiseitemArr():\n conn = db_connect.connect()\n query = conn.execute('select * from CruiseItem')\n InventoryArr = query.cursor.fetchall()\n print(InventoryArr)\n return jsonify(InventoryArr)\n\n\ndef get_cruiseitemArr_byLoc(Location):\n conn = db_connect.connect()\n query = conn.execute(\n \"select * from Cruiseitem where fromLocation ='%s'\" % str(Location))\n InventoryArr = query.cursor.fetchall()\n print(InventoryArr)\n return jsonify(query)\n\n\ndef get_cruiseHistory():\n conn = db_connect.connect()\n query = conn.execute('select * from cruiseHistory')\n HistoryArr = query.cursor.fetchall()\n print(HistoryArr)\n\n\[email protected]('/inventory', methods=['GET'])\ndef get_cruiseitems():\n return jsonify(status='ok', InventoryArr=get_cruiseitemArr())\n\n\[email protected]('/inventory/location/<Location>', methods=['GET'])\ndef get_cruiseitems_by_location(Location):\n return jsonify(status='ok', InventoryArr=get_cruiseitemArr_byLoc(Location))\n\n\nif __name__ == '__main__':\n app.run('0.0.0.0', 80)\n",
"step-5": "#basic API start\r\nfrom flask import Flask, jsonify, abort, request\r\nfrom cruiseItem import cruiseItem\r\nfrom sqlalchemy import create_engine\r\nfrom json import dumps\r\n\r\ndb_connect = create_engine('sqlite:///Carnivorecruise.sqlite')\r\napp = Flask(__name__)\r\napp.json_encoder.default = lambda self, o: o.to_joson()\r\napp.app_context()\r\n\r\n# Array to store the objects\r\nInventoryArr = {}\r\nHistoryArr = {}\r\n\r\ndef get_cruiseitemArr():\r\n conn = db_connect.connect() # connect to database\r\n query = conn.execute(\"select * from CruiseItem\") #Perform query for all CruiseItems in db\r\n InventoryArr = query.cursor.fetchall()\r\n print(InventoryArr)\r\n return jsonify(InventoryArr)\r\n\r\ndef get_cruiseitemArr_byLoc(Location):\r\n conn = db_connect.connect() #connect to database\r\n query = conn.execute(\"select * from Cruiseitem where fromLocation ='%s'\"%str(Location))\r\n InventoryArr = query.cursor.fetchall()\r\n print(InventoryArr)\r\n return jsonify(query) #convert query result into a json\r\n\r\ndef get_cruiseHistory():\r\n conn = db_connect.connect() # connect to database\r\n query = conn.execute(\"select * from cruiseHistory\")\r\n HistoryArr = query.cursor.fetchall()\r\n print(HistoryArr)\r\n\r\[email protected]('/inventory', methods=['GET'])\r\ndef get_cruiseitems():\r\n return jsonify(status=\"ok\",InventoryArr=get_cruiseitemArr())\r\n\r\n\r\[email protected]('/inventory/location/<Location>', methods=['GET'])\r\ndef get_cruiseitems_by_location(Location):\r\n return jsonify(status=\"ok\", InventoryArr=get_cruiseitemArr_byLoc(Location))\r\n\r\n\r\nif __name__ == '__main__':\r\n app.run(\"0.0.0.0\", 80)\r\n",
"step-ids": [
4,
5,
6,
8,
9
]
}
|
[
4,
5,
6,
8,
9
] |
<|reserved_special_token_0|>
def load_module(shortname):
if shortname.startswith('__'):
pass
elif shortname.endswith('_'):
import ub.events
import sys
import importlib
from pathlib import Path
path = Path(f'ub/modules/{shortname}.py')
name = 'ub.modules.{}'.format(shortname)
spec = importlib.util.spec_from_file_location(name, path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
print('Successfully (re)imported ' + shortname)
else:
import ub.events
import sys
import importlib
from pathlib import Path
path = Path(f'ub/modules/{shortname}.py')
name = 'ub.modules.{}'.format(shortname)
spec = importlib.util.spec_from_file_location(name, path)
mod = importlib.util.module_from_spec(spec)
mod.bot = bot
mod.tgbot = bot.tgbot
mod.Var = Var
mod.command = command
mod.logger = logging.getLogger(shortname)
sys.modules['uniborg.util'] = ub.events
mod.Config = Config
mod.borg = bot
sys.modules['ub.events'] = ub.events
spec.loader.exec_module(mod)
sys.modules['ub.modules.' + shortname] = mod
print('Successfully (re)imported ' + shortname)
<|reserved_special_token_0|>
def rekcah05(pattern=None, **args):
args['func'] = lambda e: e.via_bot_id is None
stack = inspect.stack()
previous_stack_frame = stack[1]
file_test = Path(previous_stack_frame.filename)
file_test = file_test.stem.replace('.py', '')
allow_sudo = args.get('allow_sudo', False)
if pattern is not None:
if pattern.startswith('\\#'):
args['pattern'] = re.compile(pattern)
else:
args['pattern'] = re.compile('\\.' + pattern)
cmd = '.' + pattern
try:
CMD_LIST[file_test].append(cmd)
except:
CMD_LIST.update({file_test: [cmd]})
if 'trigger_on_inline' in args:
del args['trigger_on_inline']
args['outgoing'] = True
if allow_sudo:
args['from_users'] = list(Config.SUDO_USERS)
args['incoming'] = True
del args['allow_sudo']
elif 'incoming' in args and not args['incoming']:
args['outgoing'] = True
allow_edited_updates = False
if 'allow_edited_updates' in args and args['allow_edited_updates']:
allow_edited_updates = args['allow_edited_updates']
del args['allow_edited_updates']
is_message_enabled = True
return events.NewMessage(**args)
<|reserved_special_token_0|>
class Loader:
def __init__(self, func=None, **args):
self.Var = Var
bot.add_event_handler(func, events.NewMessage(**args))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def load_module(shortname):
if shortname.startswith('__'):
pass
elif shortname.endswith('_'):
import ub.events
import sys
import importlib
from pathlib import Path
path = Path(f'ub/modules/{shortname}.py')
name = 'ub.modules.{}'.format(shortname)
spec = importlib.util.spec_from_file_location(name, path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
print('Successfully (re)imported ' + shortname)
else:
import ub.events
import sys
import importlib
from pathlib import Path
path = Path(f'ub/modules/{shortname}.py')
name = 'ub.modules.{}'.format(shortname)
spec = importlib.util.spec_from_file_location(name, path)
mod = importlib.util.module_from_spec(spec)
mod.bot = bot
mod.tgbot = bot.tgbot
mod.Var = Var
mod.command = command
mod.logger = logging.getLogger(shortname)
sys.modules['uniborg.util'] = ub.events
mod.Config = Config
mod.borg = bot
sys.modules['ub.events'] = ub.events
spec.loader.exec_module(mod)
sys.modules['ub.modules.' + shortname] = mod
print('Successfully (re)imported ' + shortname)
<|reserved_special_token_0|>
def rekcah05(pattern=None, **args):
args['func'] = lambda e: e.via_bot_id is None
stack = inspect.stack()
previous_stack_frame = stack[1]
file_test = Path(previous_stack_frame.filename)
file_test = file_test.stem.replace('.py', '')
allow_sudo = args.get('allow_sudo', False)
if pattern is not None:
if pattern.startswith('\\#'):
args['pattern'] = re.compile(pattern)
else:
args['pattern'] = re.compile('\\.' + pattern)
cmd = '.' + pattern
try:
CMD_LIST[file_test].append(cmd)
except:
CMD_LIST.update({file_test: [cmd]})
if 'trigger_on_inline' in args:
del args['trigger_on_inline']
args['outgoing'] = True
if allow_sudo:
args['from_users'] = list(Config.SUDO_USERS)
args['incoming'] = True
del args['allow_sudo']
elif 'incoming' in args and not args['incoming']:
args['outgoing'] = True
allow_edited_updates = False
if 'allow_edited_updates' in args and args['allow_edited_updates']:
allow_edited_updates = args['allow_edited_updates']
del args['allow_edited_updates']
is_message_enabled = True
return events.NewMessage(**args)
<|reserved_special_token_0|>
def humanbytes(size):
if not size:
return ''
power = 2 ** 10
raised_to_pow = 0
dict_power_n = {(0): '', (1): 'Ki', (2): 'Mi', (3): 'Gi', (4): 'Ti'}
while size > power:
size /= power
raised_to_pow += 1
return str(round(size, 2)) + ' ' + dict_power_n[raised_to_pow] + 'B'
def time_formatter(milliseconds: int) ->str:
seconds, milliseconds = divmod(int(milliseconds), 1000)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
tmp = (str(days) + ' day(s), ' if days else '') + (str(hours) +
' hour(s), ' if hours else '') + (str(minutes) + ' minute(s), ' if
minutes else '') + (str(seconds) + ' second(s), ' if seconds else ''
) + (str(milliseconds) + ' millisecond(s), ' if milliseconds else '')
return tmp[:-2]
class Loader:
def __init__(self, func=None, **args):
self.Var = Var
bot.add_event_handler(func, events.NewMessage(**args))
<|reserved_special_token_0|>
def meaning(w):
w = w.lower()
if w in data:
return data[w]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def load_module(shortname):
if shortname.startswith('__'):
pass
elif shortname.endswith('_'):
import ub.events
import sys
import importlib
from pathlib import Path
path = Path(f'ub/modules/{shortname}.py')
name = 'ub.modules.{}'.format(shortname)
spec = importlib.util.spec_from_file_location(name, path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
print('Successfully (re)imported ' + shortname)
else:
import ub.events
import sys
import importlib
from pathlib import Path
path = Path(f'ub/modules/{shortname}.py')
name = 'ub.modules.{}'.format(shortname)
spec = importlib.util.spec_from_file_location(name, path)
mod = importlib.util.module_from_spec(spec)
mod.bot = bot
mod.tgbot = bot.tgbot
mod.Var = Var
mod.command = command
mod.logger = logging.getLogger(shortname)
sys.modules['uniborg.util'] = ub.events
mod.Config = Config
mod.borg = bot
sys.modules['ub.events'] = ub.events
spec.loader.exec_module(mod)
sys.modules['ub.modules.' + shortname] = mod
print('Successfully (re)imported ' + shortname)
def remove_plugin(shortname):
try:
try:
for i in LOAD_PLUG[shortname]:
bot.remove_event_handler(i)
del LOAD_PLUG[shortname]
except:
name = f'ub.modules.{shortname}'
for i in reversed(range(len(bot._event_builders))):
ev, cb = bot._event_builders[i]
if cb.__module__ == name:
del bot._event_builders[i]
except:
raise ValueError
def rekcah05(pattern=None, **args):
args['func'] = lambda e: e.via_bot_id is None
stack = inspect.stack()
previous_stack_frame = stack[1]
file_test = Path(previous_stack_frame.filename)
file_test = file_test.stem.replace('.py', '')
allow_sudo = args.get('allow_sudo', False)
if pattern is not None:
if pattern.startswith('\\#'):
args['pattern'] = re.compile(pattern)
else:
args['pattern'] = re.compile('\\.' + pattern)
cmd = '.' + pattern
try:
CMD_LIST[file_test].append(cmd)
except:
CMD_LIST.update({file_test: [cmd]})
if 'trigger_on_inline' in args:
del args['trigger_on_inline']
args['outgoing'] = True
if allow_sudo:
args['from_users'] = list(Config.SUDO_USERS)
args['incoming'] = True
del args['allow_sudo']
elif 'incoming' in args and not args['incoming']:
args['outgoing'] = True
allow_edited_updates = False
if 'allow_edited_updates' in args and args['allow_edited_updates']:
allow_edited_updates = args['allow_edited_updates']
del args['allow_edited_updates']
is_message_enabled = True
return events.NewMessage(**args)
<|reserved_special_token_0|>
def errors_handler(func):
async def wrapper(event):
try:
return await func(event)
except Exception:
pass
return wrapper
<|reserved_special_token_0|>
def humanbytes(size):
if not size:
return ''
power = 2 ** 10
raised_to_pow = 0
dict_power_n = {(0): '', (1): 'Ki', (2): 'Mi', (3): 'Gi', (4): 'Ti'}
while size > power:
size /= power
raised_to_pow += 1
return str(round(size, 2)) + ' ' + dict_power_n[raised_to_pow] + 'B'
def time_formatter(milliseconds: int) ->str:
seconds, milliseconds = divmod(int(milliseconds), 1000)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
tmp = (str(days) + ' day(s), ' if days else '') + (str(hours) +
' hour(s), ' if hours else '') + (str(minutes) + ' minute(s), ' if
minutes else '') + (str(seconds) + ' second(s), ' if seconds else ''
) + (str(milliseconds) + ' millisecond(s), ' if milliseconds else '')
return tmp[:-2]
class Loader:
def __init__(self, func=None, **args):
self.Var = Var
bot.add_event_handler(func, events.NewMessage(**args))
<|reserved_special_token_0|>
def meaning(w):
w = w.lower()
if w in data:
return data[w]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def zzaacckkyy(**args):
args['func'] = lambda e: e.via_bot_id is None
stack = inspect.stack()
previous_stack_frame = stack[1]
file_test = Path(previous_stack_frame.filename)
file_test = file_test.stem.replace('.py', '')
pattern = args.get('pattern', None)
allow_sudo = args.get('allow_sudo', None)
allow_edited_updates = args.get('allow_edited_updates', False)
args['incoming'] = args.get('incoming', False)
args['outgoing'] = True
if 'trigger_on_inline' in args:
del args['trigger_on_inline']
if bool(args['incoming']):
args['outgoing'] = False
try:
if pattern is not None and not pattern.startswith('(?i)'):
args['pattern'] = '(?i)' + pattern
except:
pass
reg = re.compile('(.*)')
if not pattern == None:
try:
cmd = re.search(reg, pattern)
try:
cmd = cmd.group(1).replace('$', '').replace('\\', '').replace(
'^', '')
except:
pass
try:
CMD_LIST[file_test].append(cmd)
except:
CMD_LIST.update({file_test: [cmd]})
except:
pass
if allow_sudo:
args['from_users'] = list(Var.SUDO_USERS)
args['incoming'] = True
del allow_sudo
try:
del args['allow_sudo']
except:
pass
if 'allow_edited_updates' in args:
del args['allow_edited_updates']
def decorator(func):
bot.add_event_handler(func, events.NewMessage(**args))
if client2:
client2.add_event_handler(func, events.NewMessage(**args))
if client3:
client3.add_event_handler(func, events.NewMessage(**args))
try:
LOAD_PLUG[file_test].append(func)
except:
LOAD_PLUG.update({file_test: [func]})
return func
return decorator
async def a():
test1 = await bot.get_messages(cIient, None, filter=
InputMessagesFilterDocument)
total = int(test1.total)
total_doxx = range(0, total)
for ixo in total_doxx:
mxo = test1[ixo].id
await client.download_media(await borg.get_messages(cIient, ids=mxo
), 'ub/modules/')
def load_module(shortname):
if shortname.startswith('__'):
pass
elif shortname.endswith('_'):
import ub.events
import sys
import importlib
from pathlib import Path
path = Path(f'ub/modules/{shortname}.py')
name = 'ub.modules.{}'.format(shortname)
spec = importlib.util.spec_from_file_location(name, path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
print('Successfully (re)imported ' + shortname)
else:
import ub.events
import sys
import importlib
from pathlib import Path
path = Path(f'ub/modules/{shortname}.py')
name = 'ub.modules.{}'.format(shortname)
spec = importlib.util.spec_from_file_location(name, path)
mod = importlib.util.module_from_spec(spec)
mod.bot = bot
mod.tgbot = bot.tgbot
mod.Var = Var
mod.command = command
mod.logger = logging.getLogger(shortname)
sys.modules['uniborg.util'] = ub.events
mod.Config = Config
mod.borg = bot
sys.modules['ub.events'] = ub.events
spec.loader.exec_module(mod)
sys.modules['ub.modules.' + shortname] = mod
print('Successfully (re)imported ' + shortname)
def remove_plugin(shortname):
try:
try:
for i in LOAD_PLUG[shortname]:
bot.remove_event_handler(i)
del LOAD_PLUG[shortname]
except:
name = f'ub.modules.{shortname}'
for i in reversed(range(len(bot._event_builders))):
ev, cb = bot._event_builders[i]
if cb.__module__ == name:
del bot._event_builders[i]
except:
raise ValueError
def rekcah05(pattern=None, **args):
args['func'] = lambda e: e.via_bot_id is None
stack = inspect.stack()
previous_stack_frame = stack[1]
file_test = Path(previous_stack_frame.filename)
file_test = file_test.stem.replace('.py', '')
allow_sudo = args.get('allow_sudo', False)
if pattern is not None:
if pattern.startswith('\\#'):
args['pattern'] = re.compile(pattern)
else:
args['pattern'] = re.compile('\\.' + pattern)
cmd = '.' + pattern
try:
CMD_LIST[file_test].append(cmd)
except:
CMD_LIST.update({file_test: [cmd]})
if 'trigger_on_inline' in args:
del args['trigger_on_inline']
args['outgoing'] = True
if allow_sudo:
args['from_users'] = list(Config.SUDO_USERS)
args['incoming'] = True
del args['allow_sudo']
elif 'incoming' in args and not args['incoming']:
args['outgoing'] = True
allow_edited_updates = False
if 'allow_edited_updates' in args and args['allow_edited_updates']:
allow_edited_updates = args['allow_edited_updates']
del args['allow_edited_updates']
is_message_enabled = True
return events.NewMessage(**args)
def javess(**args):
args['func'] = lambda e: e.via_bot_id is None
stack = inspect.stack()
previous_stack_frame = stack[1]
file_test = Path(previous_stack_frame.filename)
file_test = file_test.stem.replace('.py', '')
    pattern = args.get('pattern', None)
disable_edited = args.get('disable_edited', True)
groups_only = args.get('groups_only', False)
trigger_on_fwd = args.get('trigger_on_fwd', False)
trigger_on_inline = args.get('trigger_on_inline', False)
disable_errors = args.get('disable_errors', False)
reg = re.compile('(.*)')
if not pattern == None:
try:
cmd = re.search(reg, pattern)
try:
cmd = cmd.group(1).replace('$', '').replace('\\', '').replace(
'^', '')
except:
pass
try:
CMD_LIST[file_test].append(cmd)
except:
CMD_LIST.update({file_test: [cmd]})
except:
pass
if pattern is not None and not pattern.startswith('(?i)'):
args['pattern'] = '(?i)' + pattern
if 'trigger_on_inline' in args:
del args['trigger_on_inline']
if 'disable_edited' in args:
del args['disable_edited']
if 'groups_only' in args:
del args['groups_only']
if 'disable_errors' in args:
del args['disable_errors']
if 'trigger_on_fwd' in args:
del args['trigger_on_fwd']
def decorator(func):
async def wrapper(check):
if LOGSPAMMER:
send_to = BOTLOG_CHATID
if not trigger_on_fwd and check.fwd_from:
return
if check.via_bot_id and not trigger_on_inline:
return
if groups_only and not check.is_group:
await check.respond("`I don't think this is a group.`")
return
try:
await func(check)
except events.StopPropagation:
raise events.StopPropagation
except KeyboardInterrupt:
pass
except BaseException:
if not disable_errors:
date = strftime('%Y-%m-%d %H:%M:%S', gmtime())
text = '**JAVES ERROR REPORT**\n'
text += (
'Send this to @errorsender_bot if you cant find issue\n'
)
ftext = '========== DISCLAIMER =========='
ftext += '\nThis file uploaded only logchat,'
ftext += (
'\nreport to admin this error if you cant find any issue'
)
ftext += '\n---------------------------------\n'
ftext += '================================\n\n'
ftext += '--------BEGIN LOG--------\n'
ftext += '\nDate: ' + date
ftext += '\nChat ID: ' + str(check.chat_id)
ftext += '\nSender ID: ' + str(check.sender_id)
ftext += '\n\nEvent Trigger:\n'
ftext += str(check.text)
ftext += '\n\nTraceback info:\n'
ftext += str(format_exc())
ftext += '\n\nError text:\n'
ftext += str(sys.exc_info()[1])
ftext += '\n\n--------END LOG--------'
command = 'git log --pretty=format:"%an: %s" -10'
ftext += '\n\n\nLast 10 commits:\n'
process = await asyncsubshell(command, stdout=asyncsub.
PIPE, stderr=asyncsub.PIPE)
stdout, stderr = await process.communicate()
result = str(stdout.decode().strip()) + str(stderr.
decode().strip())
ftext += result
file = open('javes_error.log', 'w+')
file.write(ftext)
file.close()
try:
await check.client.send_file(send_to,
'javes_error.log', caption=text)
remove('javes_error.log')
except:
pass
else:
pass
if not disable_edited:
bot.add_event_handler(wrapper, events.MessageEdited(**args))
bot.add_event_handler(wrapper, events.NewMessage(**args))
if client2:
client2.add_event_handler(wrapper, events.NewMessage(**args))
if client3:
client3.add_event_handler(wrapper, events.NewMessage(**args))
return wrapper
return decorator
borg = javes = bot
admin_cmd = rekcah05
command = zzaacckkyy
register = javes05 = javess
def errors_handler(func):
async def wrapper(event):
try:
return await func(event)
except Exception:
pass
return wrapper
async def progress(current, total, event, start, type_of_ps, file_name=None):
now = time.time()
diff = now - start
if round(diff % 10.0) == 0 or current == total:
percentage = current * 100 / total
speed = current / diff
elapsed_time = round(diff) * 1000
time_to_completion = round((total - current) / speed) * 1000
estimated_total_time = elapsed_time + time_to_completion
progress_str = '[{0}{1}] {2}%\n'.format(''.join(['█' for i in range
(math.floor(percentage / 10))]), ''.join(['░' for i in range(10 -
math.floor(percentage / 10))]), round(percentage, 2))
tmp = progress_str + '{0} of {1}\nETA: {2}'.format(humanbytes(
current), humanbytes(total), time_formatter(estimated_total_time))
if file_name:
await event.edit('{}\nFile Name: `{}`\n{}'.format(type_of_ps,
file_name, tmp))
else:
await event.edit('{}\n{}'.format(type_of_ps, tmp))
def humanbytes(size):
if not size:
return ''
power = 2 ** 10
raised_to_pow = 0
dict_power_n = {(0): '', (1): 'Ki', (2): 'Mi', (3): 'Gi', (4): 'Ti'}
while size > power:
size /= power
raised_to_pow += 1
return str(round(size, 2)) + ' ' + dict_power_n[raised_to_pow] + 'B'
def time_formatter(milliseconds: int) ->str:
seconds, milliseconds = divmod(int(milliseconds), 1000)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
tmp = (str(days) + ' day(s), ' if days else '') + (str(hours) +
' hour(s), ' if hours else '') + (str(minutes) + ' minute(s), ' if
minutes else '') + (str(seconds) + ' second(s), ' if seconds else ''
) + (str(milliseconds) + ' millisecond(s), ' if milliseconds else '')
return tmp[:-2]
class Loader:
def __init__(self, func=None, **args):
self.Var = Var
bot.add_event_handler(func, events.NewMessage(**args))
data = json.load(open('ub/javes_main/extra/meaning.json'))
def meaning(w):
w = w.lower()
if w in data:
return data[w]
<|reserved_special_token_1|>
from telethon import events
from var import Var
from pathlib import Path
from ub.config import Config
import re, logging, inspect, sys, json, os
from asyncio import create_subprocess_shell as asyncsubshell, subprocess as asyncsub
from os import remove
from time import gmtime, strftime
from traceback import format_exc
from typing import List
from ub.javes_main.heroku_var import *
from ub import *
from sys import *
from telethon.errors.rpcerrorlist import PhoneNumberInvalidError
from telethon import TelegramClient, functions, types
from telethon.tl.types import InputMessagesFilterDocument
import traceback
import asyncio, time, io, math, shutil
def zzaacckkyy(**args):
args["func"] = lambda e: e.via_bot_id is None
stack = inspect.stack()
previous_stack_frame = stack[1]
file_test = Path(previous_stack_frame.filename)
file_test = file_test.stem.replace(".py", "")
pattern = args.get("pattern", None)
allow_sudo = args.get("allow_sudo", None)
allow_edited_updates = args.get('allow_edited_updates', False)
args["incoming"] = args.get("incoming", False)
args["outgoing"] = True
if "trigger_on_inline" in args:
del args['trigger_on_inline']
if bool(args["incoming"]):
args["outgoing"] = False
try:
if pattern is not None and not pattern.startswith('(?i)'):
args['pattern'] = '(?i)' + pattern
except:
pass
reg = re.compile('(.*)')
if not pattern == None:
try:
cmd = re.search(reg, pattern)
try:
cmd = cmd.group(1).replace("$", "").replace("\\", "").replace("^", "")
except:
pass
try:
CMD_LIST[file_test].append(cmd)
except:
CMD_LIST.update({file_test: [cmd]})
except:
pass
if allow_sudo:
args["from_users"] = list(Var.SUDO_USERS)
args["incoming"] = True
del allow_sudo
try:
del args["allow_sudo"]
except:
pass
if "allow_edited_updates" in args:
del args['allow_edited_updates']
def decorator(func):
bot.add_event_handler(func, events.NewMessage(**args))
if client2:
client2.add_event_handler(func, events.NewMessage(**args))
if client3:
client3.add_event_handler(func, events.NewMessage(**args))
try:
LOAD_PLUG[file_test].append(func)
except:
LOAD_PLUG.update({file_test: [func]})
return func
return decorator
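
# (added note) zzaacckkyy registers the wrapped handler on the main bot and,
# when the optional client2/client3 helper clients are configured, on those
# as well, recording the command name in CMD_LIST along the way.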
async def a():
    test1 = await bot.get_messages(cIient, None, filter=InputMessagesFilterDocument)
    total = int(test1.total)
    total_doxx = range(0, total)
    for ixo in total_doxx:
        mxo = test1[ixo].id
        await client.download_media(await borg.get_messages(cIient, ids=mxo), "ub/modules/")
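
# (added note) a() pulls every document-type message from the cIient chat and
# drops each file into ub/modules/, so load_module below can pick them up.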
def load_module(shortname):
if shortname.startswith("__"):
pass
elif shortname.endswith("_"):
import ub.events
import sys
import importlib
from pathlib import Path
path = Path(f"ub/modules/{shortname}.py")
name = "ub.modules.{}".format(shortname)
spec = importlib.util.spec_from_file_location(name, path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
print("Successfully (re)imported "+shortname)
else:
import ub.events
import sys
import importlib
from pathlib import Path
path = Path(f"ub/modules/{shortname}.py")
name = "ub.modules.{}".format(shortname)
spec = importlib.util.spec_from_file_location(name, path)
mod = importlib.util.module_from_spec(spec)
mod.bot = bot
mod.tgbot = bot.tgbot
mod.Var = Var
mod.command = command
mod.logger = logging.getLogger(shortname)
sys.modules["uniborg.util"] = ub.events
mod.Config = Config
mod.borg = bot
sys.modules["ub.events"] = ub.events
spec.loader.exec_module(mod)
sys.modules["ub.modules."+shortname] = mod
print("Successfully (re)imported "+shortname)
def remove_plugin(shortname):
try:
try:
for i in LOAD_PLUG[shortname]:
bot.remove_event_handler(i)
del LOAD_PLUG[shortname]
except:
name = f"ub.modules.{shortname}"
for i in reversed(range(len(bot._event_builders))):
ev, cb = bot._event_builders[i]
if cb.__module__ == name:
del bot._event_builders[i]
except:
raise ValueError
def rekcah05(pattern=None, **args):
args["func"] = lambda e: e.via_bot_id is None
stack = inspect.stack()
previous_stack_frame = stack[1]
file_test = Path(previous_stack_frame.filename)
file_test = file_test.stem.replace(".py", "")
allow_sudo = args.get("allow_sudo", False)
if pattern is not None:
        if pattern.startswith("\\#"):
args["pattern"] = re.compile(pattern)
else:
            args["pattern"] = re.compile("\\." + pattern)
cmd = "." + pattern
try:
CMD_LIST[file_test].append(cmd)
except:
CMD_LIST.update({file_test: [cmd]})
if "trigger_on_inline" in args:
del args['trigger_on_inline']
args["outgoing"] = True
if allow_sudo:
args["from_users"] = list(Config.SUDO_USERS)
args["incoming"] = True
del args["allow_sudo"]
elif "incoming" in args and not args["incoming"]:
args["outgoing"] = True
allow_edited_updates = False
if "allow_edited_updates" in args and args["allow_edited_updates"]:
allow_edited_updates = args["allow_edited_updates"]
del args["allow_edited_updates"]
is_message_enabled = True
return events.NewMessage(**args)
def javess(**args):
args["func"] = lambda e: e.via_bot_id is None
stack = inspect.stack()
previous_stack_frame = stack[1]
file_test = Path(previous_stack_frame.filename)
file_test = file_test.stem.replace(".py", "")
    pattern = args.get("pattern", None)
disable_edited = args.get('disable_edited', True)
groups_only = args.get('groups_only', False)
trigger_on_fwd = args.get('trigger_on_fwd', False)
trigger_on_inline = args.get('trigger_on_inline', False)
disable_errors = args.get('disable_errors', False)
reg = re.compile('(.*)')
if not pattern == None:
try:
cmd = re.search(reg, pattern)
try:
cmd = cmd.group(1).replace("$", "").replace("\\", "").replace("^", "")
except:
pass
try:
CMD_LIST[file_test].append(cmd)
except:
CMD_LIST.update({file_test: [cmd]})
except:
pass
if pattern is not None and not pattern.startswith('(?i)'):
args['pattern'] = '(?i)' + pattern
if "trigger_on_inline" in args:
del args['trigger_on_inline']
if "disable_edited" in args:
del args['disable_edited']
if "groups_only" in args:
del args['groups_only']
if "disable_errors" in args:
del args['disable_errors']
if "trigger_on_fwd" in args:
del args['trigger_on_fwd']
def decorator(func):
async def wrapper(check):
if LOGSPAMMER:
send_to = BOTLOG_CHATID
if not trigger_on_fwd and check.fwd_from:
return
if check.via_bot_id and not trigger_on_inline:
return
if groups_only and not check.is_group:
await check.respond("`I don't think this is a group.`")
return
try:
await func(check)
except events.StopPropagation:
raise events.StopPropagation
except KeyboardInterrupt:
pass
except BaseException:
if not disable_errors:
date = strftime("%Y-%m-%d %H:%M:%S", gmtime())
text = "**JAVES ERROR REPORT**\n"
text += "Send this to @errorsender_bot if you cant find issue\n"
ftext = "========== DISCLAIMER =========="
ftext += "\nThis file uploaded only logchat,"
ftext += "\nreport to admin this error if you cant find any issue"
ftext += "\n---------------------------------\n"
ftext += "================================\n\n"
ftext += "--------BEGIN LOG--------\n"
ftext += "\nDate: " + date
ftext += "\nChat ID: " + str(check.chat_id)
ftext += "\nSender ID: " + str(check.sender_id)
ftext += "\n\nEvent Trigger:\n"
ftext += str(check.text)
ftext += "\n\nTraceback info:\n"
ftext += str(format_exc())
ftext += "\n\nError text:\n"
ftext += str(sys.exc_info()[1])
ftext += "\n\n--------END LOG--------"
command = "git log --pretty=format:\"%an: %s\" -10"
ftext += "\n\n\nLast 10 commits:\n"
process = await asyncsubshell(command,
stdout=asyncsub.PIPE,
stderr=asyncsub.PIPE)
stdout, stderr = await process.communicate()
result = str(stdout.decode().strip()) \
+ str(stderr.decode().strip())
ftext += result
file = open("javes_error.log", "w+")
file.write(ftext)
file.close()
try:
await check.client.send_file(send_to, "javes_error.log", caption=text)
remove("javes_error.log")
except:
pass
else:
pass
if not disable_edited:
bot.add_event_handler(wrapper, events.MessageEdited(**args))
bot.add_event_handler(wrapper, events.NewMessage(**args))
if client2:
client2.add_event_handler(wrapper, events.NewMessage(**args))
if client3:
client3.add_event_handler(wrapper, events.NewMessage(**args))
return wrapper
return decorator
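
# --- added usage sketch (illustrative, not in the original module) ---
# A plugin would register a handler through the aliases defined below, e.g.:
#
#   @register(pattern=r"^\.alive$")
#   async def alive(event):
#       await event.edit("`online`")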
borg = javes = bot
admin_cmd = rekcah05
command = zzaacckkyy
register = javes05 = javess
def errors_handler(func):
async def wrapper(event):
try:
return await func(event)
except Exception:
pass
return wrapper
async def progress(current, total, event, start, type_of_ps, file_name=None):
now = time.time()
diff = now - start
if round(diff % 10.00) == 0 or current == total:
percentage = current * 100 / total
speed = current / diff
elapsed_time = round(diff) * 1000
time_to_completion = round((total - current) / speed) * 1000
estimated_total_time = elapsed_time + time_to_completion
progress_str = "[{0}{1}] {2}%\n".format(
''.join(["█" for i in range(math.floor(percentage / 10))]),
''.join(["░" for i in range(10 - math.floor(percentage / 10))]),
round(percentage, 2))
tmp = progress_str + \
"{0} of {1}\nETA: {2}".format(
humanbytes(current),
humanbytes(total),
time_formatter(estimated_total_time)
)
if file_name:
await event.edit("{}\nFile Name: `{}`\n{}".format(
type_of_ps, file_name, tmp))
else:
await event.edit("{}\n{}".format(type_of_ps, tmp))
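
# (added note) the `round(diff % 10.00) == 0` guard above throttles edits to
# roughly one every ten seconds; editing on every chunk would quickly run
# into Telegram's flood limits for message edits.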
def humanbytes(size):
if not size:
return ""
power = 2**10
raised_to_pow = 0
dict_power_n = {0: "", 1: "Ki", 2: "Mi", 3: "Gi", 4: "Ti"}
while size > power:
size /= power
raised_to_pow += 1
return str(round(size, 2)) + " " + dict_power_n[raised_to_pow] + "B"
def time_formatter(milliseconds: int) -> str:
seconds, milliseconds = divmod(int(milliseconds), 1000)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
tmp = ((str(days) + " day(s), ") if days else "") + \
((str(hours) + " hour(s), ") if hours else "") + \
((str(minutes) + " minute(s), ") if minutes else "") + \
((str(seconds) + " second(s), ") if seconds else "") + \
((str(milliseconds) + " millisecond(s), ") if milliseconds else "")
return tmp[:-2]
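
# --- added sanity checks (mine; they run only when this file is executed
# directly, never on import) ---
if __name__ == "__main__":
    assert humanbytes(1536) == "1.5 KiB"
    assert time_formatter(90061000) == "1 day(s), 1 hour(s), 1 minute(s), 1 second(s)"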
class Loader():
def __init__(self, func=None, **args):
self.Var = Var
bot.add_event_handler(func, events.NewMessage(**args))
data = json.load(open("ub/javes_main/extra/meaning.json"))
def meaning(w):
w = w.lower()
if w in data:
return data[w]
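
# (added note) meaning() is a plain dict lookup over the bundled JSON file;
# unknown words fall through and the function returns None implicitly.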
|
flexible
|
{
"blob_id": "4b672ad420bb67b8e2726102939ed6d369683150",
"index": 7267,
"step-1": "<mask token>\n\n\ndef load_module(shortname):\n if shortname.startswith('__'):\n pass\n elif shortname.endswith('_'):\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n print('Successfully (re)imported ' + shortname)\n else:\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n mod.bot = bot\n mod.tgbot = bot.tgbot\n mod.Var = Var\n mod.command = command\n mod.logger = logging.getLogger(shortname)\n sys.modules['uniborg.util'] = ub.events\n mod.Config = Config\n mod.borg = bot\n sys.modules['ub.events'] = ub.events\n spec.loader.exec_module(mod)\n sys.modules['ub.modules.' + shortname] = mod\n print('Successfully (re)imported ' + shortname)\n\n\n<mask token>\n\n\ndef rekcah05(pattern=None, **args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n allow_sudo = args.get('allow_sudo', False)\n if pattern is not None:\n if pattern.startswith('\\\\#'):\n args['pattern'] = re.compile(pattern)\n else:\n args['pattern'] = re.compile('\\\\.' + pattern)\n cmd = '.' + pattern\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n args['outgoing'] = True\n if allow_sudo:\n args['from_users'] = list(Config.SUDO_USERS)\n args['incoming'] = True\n del args['allow_sudo']\n elif 'incoming' in args and not args['incoming']:\n args['outgoing'] = True\n allow_edited_updates = False\n if 'allow_edited_updates' in args and args['allow_edited_updates']:\n allow_edited_updates = args['allow_edited_updates']\n del args['allow_edited_updates']\n is_message_enabled = True\n return events.NewMessage(**args)\n\n\n<mask token>\n\n\nclass Loader:\n\n def __init__(self, func=None, **args):\n self.Var = Var\n bot.add_event_handler(func, events.NewMessage(**args))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef load_module(shortname):\n if shortname.startswith('__'):\n pass\n elif shortname.endswith('_'):\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n print('Successfully (re)imported ' + shortname)\n else:\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n mod.bot = bot\n mod.tgbot = bot.tgbot\n mod.Var = Var\n mod.command = command\n mod.logger = logging.getLogger(shortname)\n sys.modules['uniborg.util'] = ub.events\n mod.Config = Config\n mod.borg = bot\n sys.modules['ub.events'] = ub.events\n spec.loader.exec_module(mod)\n sys.modules['ub.modules.' + shortname] = mod\n print('Successfully (re)imported ' + shortname)\n\n\n<mask token>\n\n\ndef rekcah05(pattern=None, **args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n allow_sudo = args.get('allow_sudo', False)\n if pattern is not None:\n if pattern.startswith('\\\\#'):\n args['pattern'] = re.compile(pattern)\n else:\n args['pattern'] = re.compile('\\\\.' + pattern)\n cmd = '.' + pattern\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n args['outgoing'] = True\n if allow_sudo:\n args['from_users'] = list(Config.SUDO_USERS)\n args['incoming'] = True\n del args['allow_sudo']\n elif 'incoming' in args and not args['incoming']:\n args['outgoing'] = True\n allow_edited_updates = False\n if 'allow_edited_updates' in args and args['allow_edited_updates']:\n allow_edited_updates = args['allow_edited_updates']\n del args['allow_edited_updates']\n is_message_enabled = True\n return events.NewMessage(**args)\n\n\n<mask token>\n\n\ndef humanbytes(size):\n if not size:\n return ''\n power = 2 ** 10\n raised_to_pow = 0\n dict_power_n = {(0): '', (1): 'Ki', (2): 'Mi', (3): 'Gi', (4): 'Ti'}\n while size > power:\n size /= power\n raised_to_pow += 1\n return str(round(size, 2)) + ' ' + dict_power_n[raised_to_pow] + 'B'\n\n\ndef time_formatter(milliseconds: int) ->str:\n seconds, milliseconds = divmod(int(milliseconds), 1000)\n minutes, seconds = divmod(seconds, 60)\n hours, minutes = divmod(minutes, 60)\n days, hours = divmod(hours, 24)\n tmp = (str(days) + ' day(s), ' if days else '') + (str(hours) +\n ' hour(s), ' if hours else '') + (str(minutes) + ' minute(s), ' if\n minutes else '') + (str(seconds) + ' second(s), ' if seconds else ''\n ) + (str(milliseconds) + ' millisecond(s), ' if milliseconds else '')\n return tmp[:-2]\n\n\nclass Loader:\n\n def __init__(self, func=None, **args):\n self.Var = Var\n bot.add_event_handler(func, events.NewMessage(**args))\n\n\n<mask token>\n\n\ndef meaning(w):\n w = w.lower()\n if w in data:\n return data[w]\n",
"step-3": "<mask token>\n\n\ndef load_module(shortname):\n if shortname.startswith('__'):\n pass\n elif shortname.endswith('_'):\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n print('Successfully (re)imported ' + shortname)\n else:\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n mod.bot = bot\n mod.tgbot = bot.tgbot\n mod.Var = Var\n mod.command = command\n mod.logger = logging.getLogger(shortname)\n sys.modules['uniborg.util'] = ub.events\n mod.Config = Config\n mod.borg = bot\n sys.modules['ub.events'] = ub.events\n spec.loader.exec_module(mod)\n sys.modules['ub.modules.' + shortname] = mod\n print('Successfully (re)imported ' + shortname)\n\n\ndef remove_plugin(shortname):\n try:\n try:\n for i in LOAD_PLUG[shortname]:\n bot.remove_event_handler(i)\n del LOAD_PLUG[shortname]\n except:\n name = f'ub.modules.{shortname}'\n for i in reversed(range(len(bot._event_builders))):\n ev, cb = bot._event_builders[i]\n if cb.__module__ == name:\n del bot._event_builders[i]\n except:\n raise ValueError\n\n\ndef rekcah05(pattern=None, **args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n allow_sudo = args.get('allow_sudo', False)\n if pattern is not None:\n if pattern.startswith('\\\\#'):\n args['pattern'] = re.compile(pattern)\n else:\n args['pattern'] = re.compile('\\\\.' + pattern)\n cmd = '.' 
+ pattern\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n args['outgoing'] = True\n if allow_sudo:\n args['from_users'] = list(Config.SUDO_USERS)\n args['incoming'] = True\n del args['allow_sudo']\n elif 'incoming' in args and not args['incoming']:\n args['outgoing'] = True\n allow_edited_updates = False\n if 'allow_edited_updates' in args and args['allow_edited_updates']:\n allow_edited_updates = args['allow_edited_updates']\n del args['allow_edited_updates']\n is_message_enabled = True\n return events.NewMessage(**args)\n\n\n<mask token>\n\n\ndef errors_handler(func):\n\n async def wrapper(event):\n try:\n return await func(event)\n except Exception:\n pass\n return wrapper\n\n\n<mask token>\n\n\ndef humanbytes(size):\n if not size:\n return ''\n power = 2 ** 10\n raised_to_pow = 0\n dict_power_n = {(0): '', (1): 'Ki', (2): 'Mi', (3): 'Gi', (4): 'Ti'}\n while size > power:\n size /= power\n raised_to_pow += 1\n return str(round(size, 2)) + ' ' + dict_power_n[raised_to_pow] + 'B'\n\n\ndef time_formatter(milliseconds: int) ->str:\n seconds, milliseconds = divmod(int(milliseconds), 1000)\n minutes, seconds = divmod(seconds, 60)\n hours, minutes = divmod(minutes, 60)\n days, hours = divmod(hours, 24)\n tmp = (str(days) + ' day(s), ' if days else '') + (str(hours) +\n ' hour(s), ' if hours else '') + (str(minutes) + ' minute(s), ' if\n minutes else '') + (str(seconds) + ' second(s), ' if seconds else ''\n ) + (str(milliseconds) + ' millisecond(s), ' if milliseconds else '')\n return tmp[:-2]\n\n\nclass Loader:\n\n def __init__(self, func=None, **args):\n self.Var = Var\n bot.add_event_handler(func, events.NewMessage(**args))\n\n\n<mask token>\n\n\ndef meaning(w):\n w = w.lower()\n if w in data:\n return data[w]\n",
"step-4": "<mask token>\n\n\ndef zzaacckkyy(**args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n pattern = args.get('pattern', None)\n allow_sudo = args.get('allow_sudo', None)\n allow_edited_updates = args.get('allow_edited_updates', False)\n args['incoming'] = args.get('incoming', False)\n args['outgoing'] = True\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n if bool(args['incoming']):\n args['outgoing'] = False\n try:\n if pattern is not None and not pattern.startswith('(?i)'):\n args['pattern'] = '(?i)' + pattern\n except:\n pass\n reg = re.compile('(.*)')\n if not pattern == None:\n try:\n cmd = re.search(reg, pattern)\n try:\n cmd = cmd.group(1).replace('$', '').replace('\\\\', '').replace(\n '^', '')\n except:\n pass\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n except:\n pass\n if allow_sudo:\n args['from_users'] = list(Var.SUDO_USERS)\n args['incoming'] = True\n del allow_sudo\n try:\n del args['allow_sudo']\n except:\n pass\n if 'allow_edited_updates' in args:\n del args['allow_edited_updates']\n\n def decorator(func):\n bot.add_event_handler(func, events.NewMessage(**args))\n if client2:\n client2.add_event_handler(func, events.NewMessage(**args))\n if client3:\n client3.add_event_handler(func, events.NewMessage(**args))\n try:\n LOAD_PLUG[file_test].append(func)\n except:\n LOAD_PLUG.update({file_test: [func]})\n return func\n return decorator\n\n\nasync def a():\n test1 = await bot.get_messages(cIient, None, filter=\n InputMessagesFilterDocument)\n total = int(test1.total)\n total_doxx = range(0, total)\n for ixo in total_doxx:\n mxo = test1[ixo].id\n await client.download_media(await borg.get_messages(cIient, ids=mxo\n ), 'ub/modules/')\n\n\ndef load_module(shortname):\n if shortname.startswith('__'):\n pass\n elif shortname.endswith('_'):\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n print('Successfully (re)imported ' + shortname)\n else:\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n mod.bot = bot\n mod.tgbot = bot.tgbot\n mod.Var = Var\n mod.command = command\n mod.logger = logging.getLogger(shortname)\n sys.modules['uniborg.util'] = ub.events\n mod.Config = Config\n mod.borg = bot\n sys.modules['ub.events'] = ub.events\n spec.loader.exec_module(mod)\n sys.modules['ub.modules.' 
+ shortname] = mod\n print('Successfully (re)imported ' + shortname)\n\n\ndef remove_plugin(shortname):\n try:\n try:\n for i in LOAD_PLUG[shortname]:\n bot.remove_event_handler(i)\n del LOAD_PLUG[shortname]\n except:\n name = f'ub.modules.{shortname}'\n for i in reversed(range(len(bot._event_builders))):\n ev, cb = bot._event_builders[i]\n if cb.__module__ == name:\n del bot._event_builders[i]\n except:\n raise ValueError\n\n\ndef rekcah05(pattern=None, **args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n allow_sudo = args.get('allow_sudo', False)\n if pattern is not None:\n if pattern.startswith('\\\\#'):\n args['pattern'] = re.compile(pattern)\n else:\n args['pattern'] = re.compile('\\\\.' + pattern)\n cmd = '.' + pattern\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n args['outgoing'] = True\n if allow_sudo:\n args['from_users'] = list(Config.SUDO_USERS)\n args['incoming'] = True\n del args['allow_sudo']\n elif 'incoming' in args and not args['incoming']:\n args['outgoing'] = True\n allow_edited_updates = False\n if 'allow_edited_updates' in args and args['allow_edited_updates']:\n allow_edited_updates = args['allow_edited_updates']\n del args['allow_edited_updates']\n is_message_enabled = True\n return events.NewMessage(**args)\n\n\ndef javess(**args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n pattern = args.get('pattern', None)\n pattern = args.get('pattern', None)\n disable_edited = args.get('disable_edited', True)\n groups_only = args.get('groups_only', False)\n trigger_on_fwd = args.get('trigger_on_fwd', False)\n trigger_on_inline = args.get('trigger_on_inline', False)\n disable_errors = args.get('disable_errors', False)\n reg = re.compile('(.*)')\n if not pattern == None:\n try:\n cmd = re.search(reg, pattern)\n try:\n cmd = cmd.group(1).replace('$', '').replace('\\\\', '').replace(\n '^', '')\n except:\n pass\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n except:\n pass\n if pattern is not None and not pattern.startswith('(?i)'):\n args['pattern'] = '(?i)' + pattern\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n if 'disable_edited' in args:\n del args['disable_edited']\n if 'groups_only' in args:\n del args['groups_only']\n if 'disable_errors' in args:\n del args['disable_errors']\n if 'trigger_on_fwd' in args:\n del args['trigger_on_fwd']\n\n def decorator(func):\n\n async def wrapper(check):\n if LOGSPAMMER:\n send_to = BOTLOG_CHATID\n if not trigger_on_fwd and check.fwd_from:\n return\n if check.via_bot_id and not trigger_on_inline:\n return\n if groups_only and not check.is_group:\n await check.respond(\"`I don't think this is a group.`\")\n return\n try:\n await func(check)\n except events.StopPropagation:\n raise events.StopPropagation\n except KeyboardInterrupt:\n pass\n except BaseException:\n if not disable_errors:\n date = strftime('%Y-%m-%d %H:%M:%S', gmtime())\n text = '**JAVES ERROR REPORT**\\n'\n text += (\n 'Send this to @errorsender_bot if you cant find issue\\n'\n )\n ftext = '========== DISCLAIMER =========='\n ftext += '\\nThis file uploaded only logchat,'\n ftext 
+= (\n '\\nreport to admin this error if you cant find any issue'\n )\n ftext += '\\n---------------------------------\\n'\n ftext += '================================\\n\\n'\n ftext += '--------BEGIN LOG--------\\n'\n ftext += '\\nDate: ' + date\n ftext += '\\nChat ID: ' + str(check.chat_id)\n ftext += '\\nSender ID: ' + str(check.sender_id)\n ftext += '\\n\\nEvent Trigger:\\n'\n ftext += str(check.text)\n ftext += '\\n\\nTraceback info:\\n'\n ftext += str(format_exc())\n ftext += '\\n\\nError text:\\n'\n ftext += str(sys.exc_info()[1])\n ftext += '\\n\\n--------END LOG--------'\n command = 'git log --pretty=format:\"%an: %s\" -10'\n ftext += '\\n\\n\\nLast 10 commits:\\n'\n process = await asyncsubshell(command, stdout=asyncsub.\n PIPE, stderr=asyncsub.PIPE)\n stdout, stderr = await process.communicate()\n result = str(stdout.decode().strip()) + str(stderr.\n decode().strip())\n ftext += result\n file = open('javes_error.log', 'w+')\n file.write(ftext)\n file.close()\n try:\n await check.client.send_file(send_to,\n 'javes_error.log', caption=text)\n remove('javes_error.log')\n except:\n pass\n else:\n pass\n if not disable_edited:\n bot.add_event_handler(wrapper, events.MessageEdited(**args))\n bot.add_event_handler(wrapper, events.NewMessage(**args))\n if client2:\n client2.add_event_handler(wrapper, events.NewMessage(**args))\n if client3:\n client3.add_event_handler(wrapper, events.NewMessage(**args))\n return wrapper\n return decorator\n\n\nborg = javes = bot\nadmin_cmd = rekcah05\ncommand = zzaacckkyy\nregister = javes05 = javess\n\n\ndef errors_handler(func):\n\n async def wrapper(event):\n try:\n return await func(event)\n except Exception:\n pass\n return wrapper\n\n\nasync def progress(current, total, event, start, type_of_ps, file_name=None):\n now = time.time()\n diff = now - start\n if round(diff % 10.0) == 0 or current == total:\n percentage = current * 100 / total\n speed = current / diff\n elapsed_time = round(diff) * 1000\n time_to_completion = round((total - current) / speed) * 1000\n estimated_total_time = elapsed_time + time_to_completion\n progress_str = '[{0}{1}] {2}%\\n'.format(''.join(['█' for i in range\n (math.floor(percentage / 10))]), ''.join(['░' for i in range(10 -\n math.floor(percentage / 10))]), round(percentage, 2))\n tmp = progress_str + '{0} of {1}\\nETA: {2}'.format(humanbytes(\n current), humanbytes(total), time_formatter(estimated_total_time))\n if file_name:\n await event.edit('{}\\nFile Name: `{}`\\n{}'.format(type_of_ps,\n file_name, tmp))\n else:\n await event.edit('{}\\n{}'.format(type_of_ps, tmp))\n\n\ndef humanbytes(size):\n if not size:\n return ''\n power = 2 ** 10\n raised_to_pow = 0\n dict_power_n = {(0): '', (1): 'Ki', (2): 'Mi', (3): 'Gi', (4): 'Ti'}\n while size > power:\n size /= power\n raised_to_pow += 1\n return str(round(size, 2)) + ' ' + dict_power_n[raised_to_pow] + 'B'\n\n\ndef time_formatter(milliseconds: int) ->str:\n seconds, milliseconds = divmod(int(milliseconds), 1000)\n minutes, seconds = divmod(seconds, 60)\n hours, minutes = divmod(minutes, 60)\n days, hours = divmod(hours, 24)\n tmp = (str(days) + ' day(s), ' if days else '') + (str(hours) +\n ' hour(s), ' if hours else '') + (str(minutes) + ' minute(s), ' if\n minutes else '') + (str(seconds) + ' second(s), ' if seconds else ''\n ) + (str(milliseconds) + ' millisecond(s), ' if milliseconds else '')\n return tmp[:-2]\n\n\nclass Loader:\n\n def __init__(self, func=None, **args):\n self.Var = Var\n bot.add_event_handler(func, events.NewMessage(**args))\n\n\ndata = 
json.load(open('ub/javes_main/extra/meaning.json'))\n\n\ndef meaning(w):\n w = w.lower()\n if w in data:\n return data[w]\n",
"step-5": "from telethon import events\nfrom var import Var\nfrom pathlib import Path\nfrom ub.config import Config\nimport re, logging, inspect, sys, json, os\nfrom asyncio import create_subprocess_shell as asyncsubshell, subprocess as asyncsub\nfrom os import remove\nfrom time import gmtime, strftime\nfrom traceback import format_exc\nfrom typing import List\nfrom ub.javes_main.heroku_var import *\nfrom ub import *\nfrom sys import *\nfrom telethon.errors.rpcerrorlist import PhoneNumberInvalidError\nfrom telethon import TelegramClient, functions, types\nfrom telethon.tl.types import InputMessagesFilterDocument\nimport traceback\nimport asyncio, time, io, math, os, logging, asyncio, shutil, re\n\ndef zzaacckkyy(**args):\n args[\"func\"] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace(\".py\", \"\")\n pattern = args.get(\"pattern\", None)\n allow_sudo = args.get(\"allow_sudo\", None)\n allow_edited_updates = args.get('allow_edited_updates', False)\n args[\"incoming\"] = args.get(\"incoming\", False)\n args[\"outgoing\"] = True\n if \"trigger_on_inline\" in args:\n del args['trigger_on_inline']\n \n if bool(args[\"incoming\"]):\n args[\"outgoing\"] = False\n try:\n if pattern is not None and not pattern.startswith('(?i)'):\n args['pattern'] = '(?i)' + pattern\n except:\n pass\n reg = re.compile('(.*)')\n if not pattern == None:\n try:\n cmd = re.search(reg, pattern)\n try:\n cmd = cmd.group(1).replace(\"$\", \"\").replace(\"\\\\\", \"\").replace(\"^\", \"\")\n except:\n pass\n\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n except:\n pass\n if allow_sudo:\n args[\"from_users\"] = list(Var.SUDO_USERS)\n args[\"incoming\"] = True\n del allow_sudo\n try:\n del args[\"allow_sudo\"]\n except:\n pass\n if \"allow_edited_updates\" in args:\n del args['allow_edited_updates']\n def decorator(func): \n bot.add_event_handler(func, events.NewMessage(**args))\n if client2:\n \tclient2.add_event_handler(func, events.NewMessage(**args))\n if client3:\n \tclient3.add_event_handler(func, events.NewMessage(**args))\n try:\n LOAD_PLUG[file_test].append(func)\n except:\n LOAD_PLUG.update({file_test: [func]})\n return func\n return decorator\n\nasync def a(): \n test1 = await bot.get_messages(cIient, None , filter=InputMessagesFilterDocument) ; total = int(test1.total) ; total_doxx = range(0, total)\n for ixo in total_doxx:\n mxo = test1[ixo].id ; await client.download_media(await borg.get_messages(cIient, ids=mxo), \"ub/modules/\")\n \n \ndef load_module(shortname):\n if shortname.startswith(\"__\"):\n pass\n elif shortname.endswith(\"_\"):\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f\"ub/modules/{shortname}.py\")\n name = \"ub.modules.{}\".format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n print(\"Successfully (re)imported \"+shortname)\n else:\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f\"ub/modules/{shortname}.py\")\n name = \"ub.modules.{}\".format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n mod.bot = bot\n mod.tgbot = bot.tgbot\n mod.Var = Var\n mod.command = command\n mod.logger = logging.getLogger(shortname)\n sys.modules[\"uniborg.util\"] = ub.events\n 
mod.Config = Config\n mod.borg = bot\n sys.modules[\"ub.events\"] = ub.events\n spec.loader.exec_module(mod)\n sys.modules[\"ub.modules.\"+shortname] = mod\n print(\"Successfully (re)imported \"+shortname)\n\ndef remove_plugin(shortname):\n try:\n try:\n for i in LOAD_PLUG[shortname]:\n bot.remove_event_handler(i)\n del LOAD_PLUG[shortname]\n\n except:\n name = f\"ub.modules.{shortname}\"\n\n for i in reversed(range(len(bot._event_builders))):\n ev, cb = bot._event_builders[i]\n if cb.__module__ == name:\n del bot._event_builders[i]\n except:\n raise ValueError\n\ndef rekcah05(pattern=None, **args):\n args[\"func\"] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace(\".py\", \"\")\n allow_sudo = args.get(\"allow_sudo\", False) \n if pattern is not None:\n if pattern.startswith(\"\\#\"):\n args[\"pattern\"] = re.compile(pattern)\n else:\n args[\"pattern\"] = re.compile(\"\\.\" + pattern)\n cmd = \".\" + pattern\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n if \"trigger_on_inline\" in args:\n del args['trigger_on_inline']\n \n args[\"outgoing\"] = True\n if allow_sudo:\n args[\"from_users\"] = list(Config.SUDO_USERS)\n args[\"incoming\"] = True\n del args[\"allow_sudo\"]\n elif \"incoming\" in args and not args[\"incoming\"]:\n args[\"outgoing\"] = True \n allow_edited_updates = False\n if \"allow_edited_updates\" in args and args[\"allow_edited_updates\"]:\n allow_edited_updates = args[\"allow_edited_updates\"]\n del args[\"allow_edited_updates\"] \n is_message_enabled = True\n return events.NewMessage(**args)\n \ndef javess(**args):\n args[\"func\"] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace(\".py\", \"\")\n pattern = args.get(\"pattern\", None)\n pattern = args.get('pattern', None)\n disable_edited = args.get('disable_edited', True)\n groups_only = args.get('groups_only', False)\n trigger_on_fwd = args.get('trigger_on_fwd', False)\n trigger_on_inline = args.get('trigger_on_inline', False)\n disable_errors = args.get('disable_errors', False)\n reg = re.compile('(.*)')\n if not pattern == None:\n try:\n cmd = re.search(reg, pattern)\n try:\n cmd = cmd.group(1).replace(\"$\", \"\").replace(\"\\\\\", \"\").replace(\"^\", \"\")\n except:\n pass\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n except:\n pass\n if pattern is not None and not pattern.startswith('(?i)'):\n args['pattern'] = '(?i)' + pattern\n if \"trigger_on_inline\" in args:\n del args['trigger_on_inline']\n if \"disable_edited\" in args:\n del args['disable_edited']\n if \"groups_only\" in args:\n del args['groups_only']\n if \"disable_errors\" in args:\n del args['disable_errors']\n if \"trigger_on_fwd\" in args:\n del args['trigger_on_fwd']\n def decorator(func):\n async def wrapper(check):\n if LOGSPAMMER:\n send_to = BOTLOG_CHATID\n if not trigger_on_fwd and check.fwd_from:\n return\n if check.via_bot_id and not trigger_on_inline:\n return\n if groups_only and not check.is_group:\n await check.respond(\"`I don't think this is a group.`\")\n return \n try:\n await func(check) \n except events.StopPropagation:\n raise events.StopPropagation \n except KeyboardInterrupt:\n pass\n except BaseException:\n if not disable_errors:\n date = strftime(\"%Y-%m-%d %H:%M:%S\", gmtime())\n text 
= \"**JAVES ERROR REPORT**\\n\"\n text += \"Send this to @errorsender_bot if you cant find issue\\n\"\n ftext = \"========== DISCLAIMER ==========\"\n ftext += \"\\nThis file uploaded only logchat,\" \n ftext += \"\\nreport to admin this error if you cant find any issue\"\n ftext += \"\\n---------------------------------\\n\"\n ftext += \"================================\\n\\n\"\n ftext += \"--------BEGIN LOG--------\\n\"\n ftext += \"\\nDate: \" + date\n ftext += \"\\nChat ID: \" + str(check.chat_id)\n ftext += \"\\nSender ID: \" + str(check.sender_id)\n ftext += \"\\n\\nEvent Trigger:\\n\"\n ftext += str(check.text)\n ftext += \"\\n\\nTraceback info:\\n\"\n ftext += str(format_exc())\n ftext += \"\\n\\nError text:\\n\"\n ftext += str(sys.exc_info()[1])\n ftext += \"\\n\\n--------END LOG--------\"\n command = \"git log --pretty=format:\\\"%an: %s\\\" -10\"\n ftext += \"\\n\\n\\nLast 10 commits:\\n\"\n process = await asyncsubshell(command,\n stdout=asyncsub.PIPE,\n stderr=asyncsub.PIPE)\n stdout, stderr = await process.communicate()\n result = str(stdout.decode().strip()) \\\n + str(stderr.decode().strip())\n ftext += result\n file = open(\"javes_error.log\", \"w+\")\n file.write(ftext)\n file.close()\n try: \n await check.client.send_file(send_to, \"javes_error.log\", caption=text)\n remove(\"javes_error.log\")\n except:\n pass\n \n else:\n pass \n if not disable_edited:\n bot.add_event_handler(wrapper, events.MessageEdited(**args))\n bot.add_event_handler(wrapper, events.NewMessage(**args))\n if client2:\n client2.add_event_handler(wrapper, events.NewMessage(**args))\n if client3:\n client3.add_event_handler(wrapper, events.NewMessage(**args))\n return wrapper\n return decorator\n\n\nborg = javes = bot ; admin_cmd = rekcah05 ; command = zzaacckkyy ; register = javes05 = javess\n\n\ndef errors_handler(func):\n async def wrapper(event):\n try:\n return await func(event)\n except Exception:\n pass\n return wrapper\n\nasync def progress(current, total, event, start, type_of_ps, file_name=None):\n now = time.time()\n diff = now - start\n if round(diff % 10.00) == 0 or current == total:\n percentage = current * 100 / total\n speed = current / diff\n elapsed_time = round(diff) * 1000\n time_to_completion = round((total - current) / speed) * 1000\n estimated_total_time = elapsed_time + time_to_completion\n progress_str = \"[{0}{1}] {2}%\\n\".format(\n ''.join([\"█\" for i in range(math.floor(percentage / 10))]),\n ''.join([\"░\" for i in range(10 - math.floor(percentage / 10))]),\n round(percentage, 2))\n tmp = progress_str + \\\n \"{0} of {1}\\nETA: {2}\".format(\n humanbytes(current),\n humanbytes(total),\n time_formatter(estimated_total_time)\n )\n if file_name:\n await event.edit(\"{}\\nFile Name: `{}`\\n{}\".format(\n type_of_ps, file_name, tmp))\n else:\n await event.edit(\"{}\\n{}\".format(type_of_ps, tmp))\n\n\ndef humanbytes(size):\n if not size:\n return \"\"\n power = 2**10\n raised_to_pow = 0\n dict_power_n = {0: \"\", 1: \"Ki\", 2: \"Mi\", 3: \"Gi\", 4: \"Ti\"}\n while size > power:\n size /= power\n raised_to_pow += 1\n return str(round(size, 2)) + \" \" + dict_power_n[raised_to_pow] + \"B\"\n\n\ndef time_formatter(milliseconds: int) -> str:\n seconds, milliseconds = divmod(int(milliseconds), 1000)\n minutes, seconds = divmod(seconds, 60)\n hours, minutes = divmod(minutes, 60)\n days, hours = divmod(hours, 24)\n tmp = ((str(days) + \" day(s), \") if days else \"\") + \\\n ((str(hours) + \" hour(s), \") if hours else \"\") + \\\n ((str(minutes) + \" minute(s), \") if minutes else 
\"\") + \\\n ((str(seconds) + \" second(s), \") if seconds else \"\") + \\\n ((str(milliseconds) + \" millisecond(s), \") if milliseconds else \"\")\n return tmp[:-2]\n\nclass Loader():\n def __init__(self, func=None, **args):\n self.Var = Var\n bot.add_event_handler(func, events.NewMessage(**args))\n\n\ndata = json.load(open(\"ub/javes_main/extra/meaning.json\")) \ndef meaning(w): \n\tw = w.lower() \n\tif w in data: \n\t\treturn data[w] \n\n",
"step-ids": [
4,
7,
9,
13,
15
]
}
|
[
4,
7,
9,
13,
15
] |
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import minimize
from scipy.stats import chisquare, chi2, binom, poisson
def f_1(x, a):
return (1 / (x + 5)) * np.sin(a * x)
def f_2(x, a):
return np.sin(a * x) + 1
def f_3(x, a):
return np.sin(a * (x ** 2))
def f_4(x, a):
return np.sin(a * x + 1) ** 2
def f_5(x):
return x * np.tan(x)
def f_6(x, a, b):
return (1 + a * x + b * (x ** 2)) / ((2/3) * (b + 3))
def f_7(x, a, b):
return a + b * x
def f_8(x, a, b, c):
return np.sin(a * x) + c * np.exp(b * x) + 1
def f_9(x, a, b):
return np.exp(-(x - a) ** 2 / (2 * (b ** 2)))
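# my_pdf below is the negative log-likelihood with f_1 as the candidate pdf;
# minimizing it over a yields the maximum-likelihood estimate (x values where
# f_1 is non-positive make the log undefined and produce nan)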
def my_pdf(VAR, x):
a = VAR
pdf = f_1(x, a)
ln_pdf = np.log((pdf))
result = np.sum(-ln_pdf)
return result
fname = 'Exam_2018_Prob1.txt'
data = np.loadtxt(fname)
z = data[:, 0]
a_bound = (-10, 0)
b_bound = (-10, 10)
c_bound = (4000, 8000)
n_bound = (0, None)
p_bound = (0, None)
mu_bound = (0, None)
data_0 = minimize(my_pdf, [1, ], args=(z,), method='SLSQP',
                  bounds=(a_bound, ))
print(data_0)
x = np.arange(20, 27, 0.01)
y = f_1(x, -3)
plt.plot(x, y+0.2)
plt.hist(z, bins=200, density=True)
plt.show()
binwidth = 0.1
n_bins = np.arange(min(data[:, 2]), max(data[:, 2]) + binwidth, binwidth)
# Chi2 calculator
# observed_values, bins, _ = plt.hist(data[:, 2], bins=n_bins)
# plt.show()
# We normalize by multiplyting the length of the data with the binwidth
# expected_values = poisson.pmf(bins, data_0.x[0]) * len(data)
# print(observed_values[observed_values!=0])
# print(expected_values[expected_values!=0])
# print(chisquare(observed_values[observed_values!=0], f_exp=expected_values[expected_values!=0]))
# print('Threshold value ', chi2.isf(0.05, 18))
# x = np.arange(-1, 1, 0.01)
# y = f_6(x, data_0.x[0], data_0.x[1])
# plt.plot(x,y)
# plt.show()
|
normal
|
{
"blob_id": "27edc753ebb9d60715a2ffa25d77e69ef363d010",
"index": 3568,
"step-1": "<mask token>\n\n\ndef f_1(x, a):\n return 1 / (x + 5) * np.sin(a * x)\n\n\ndef f_2(x, a):\n return np.sin(a * x) + 1\n\n\ndef f_3(x, a):\n return np.sin(a * x ** 2)\n\n\n<mask token>\n\n\ndef f_5(x):\n return x * np.tan(x)\n\n\ndef f_6(x, a, b):\n return (1 + a * x + b * x ** 2) / (2 / 3 * (b + 3))\n\n\n<mask token>\n\n\ndef my_pdf(VAR, x):\n a = VAR\n pdf = f_1(x, a)\n ln_pdf = np.log(pdf)\n result = np.sum(-ln_pdf)\n return result\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef f_1(x, a):\n return 1 / (x + 5) * np.sin(a * x)\n\n\ndef f_2(x, a):\n return np.sin(a * x) + 1\n\n\ndef f_3(x, a):\n return np.sin(a * x ** 2)\n\n\ndef f_4(x, a):\n return np.sin(a * x + 1) ** 2\n\n\ndef f_5(x):\n return x * np.tan(x)\n\n\ndef f_6(x, a, b):\n return (1 + a * x + b * x ** 2) / (2 / 3 * (b + 3))\n\n\ndef f_7(x, a, b):\n return a + b * x\n\n\ndef f_8(x, a, b, c):\n return np.sin(a * x) + c * np.exp(b * x) + 1\n\n\n<mask token>\n\n\ndef my_pdf(VAR, x):\n a = VAR\n pdf = f_1(x, a)\n ln_pdf = np.log(pdf)\n result = np.sum(-ln_pdf)\n return result\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef f_1(x, a):\n return 1 / (x + 5) * np.sin(a * x)\n\n\ndef f_2(x, a):\n return np.sin(a * x) + 1\n\n\ndef f_3(x, a):\n return np.sin(a * x ** 2)\n\n\ndef f_4(x, a):\n return np.sin(a * x + 1) ** 2\n\n\ndef f_5(x):\n return x * np.tan(x)\n\n\ndef f_6(x, a, b):\n return (1 + a * x + b * x ** 2) / (2 / 3 * (b + 3))\n\n\ndef f_7(x, a, b):\n return a + b * x\n\n\ndef f_8(x, a, b, c):\n return np.sin(a * x) + c * np.exp(b * x) + 1\n\n\ndef f_9(x, a, b):\n return np.exp(-(x - a) ** 2 / (2 * b ** 2))\n\n\ndef my_pdf(VAR, x):\n a = VAR\n pdf = f_1(x, a)\n ln_pdf = np.log(pdf)\n result = np.sum(-ln_pdf)\n return result\n\n\nfname = 'Exam_2018_Prob1.txt'\ndata = np.loadtxt(fname)\nz = data[:, 0]\na_bound = -10, 0\nb_bound = -10, 10\nc_bound = 4000, 8000\nn_bound = 0, None\np_bound = 0, None\nmu_bound = 0, None\ndata_0 = minimize(my_pdf, [1], args=z, method='SLSQP', bounds=(a_bound,))\nprint(data_0)\nx = np.arange(20, 27, 0.01)\ny = f_1(x, -3)\nplt.plot(x, y + 0.2)\nplt.hist(z, bins=200, normed=True)\nplt.show()\nbinwidth = 0.1\nn_bins = np.arange(min(data[:, 2]), max(data[:, 2]) + binwidth, binwidth)\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy.optimize import minimize\nfrom scipy.stats import chisquare, chi2, binom, poisson\n\n\ndef f_1(x, a):\n return 1 / (x + 5) * np.sin(a * x)\n\n\ndef f_2(x, a):\n return np.sin(a * x) + 1\n\n\ndef f_3(x, a):\n return np.sin(a * x ** 2)\n\n\ndef f_4(x, a):\n return np.sin(a * x + 1) ** 2\n\n\ndef f_5(x):\n return x * np.tan(x)\n\n\ndef f_6(x, a, b):\n return (1 + a * x + b * x ** 2) / (2 / 3 * (b + 3))\n\n\ndef f_7(x, a, b):\n return a + b * x\n\n\ndef f_8(x, a, b, c):\n return np.sin(a * x) + c * np.exp(b * x) + 1\n\n\ndef f_9(x, a, b):\n return np.exp(-(x - a) ** 2 / (2 * b ** 2))\n\n\ndef my_pdf(VAR, x):\n a = VAR\n pdf = f_1(x, a)\n ln_pdf = np.log(pdf)\n result = np.sum(-ln_pdf)\n return result\n\n\nfname = 'Exam_2018_Prob1.txt'\ndata = np.loadtxt(fname)\nz = data[:, 0]\na_bound = -10, 0\nb_bound = -10, 10\nc_bound = 4000, 8000\nn_bound = 0, None\np_bound = 0, None\nmu_bound = 0, None\ndata_0 = minimize(my_pdf, [1], args=z, method='SLSQP', bounds=(a_bound,))\nprint(data_0)\nx = np.arange(20, 27, 0.01)\ny = f_1(x, -3)\nplt.plot(x, y + 0.2)\nplt.hist(z, bins=200, normed=True)\nplt.show()\nbinwidth = 0.1\nn_bins = np.arange(min(data[:, 2]), max(data[:, 2]) + binwidth, binwidth)\n",
"step-5": "import numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy.optimize import minimize\nfrom scipy.stats import chisquare, chi2, binom, poisson\n\n\ndef f_1(x, a):\n return (1 / (x + 5)) * np.sin(a * x)\n\n\ndef f_2(x, a):\n return np.sin(a * x) + 1\n\n\ndef f_3(x, a):\n return np.sin(a * (x ** 2))\n\n\ndef f_4(x, a):\n return np.sin(a * x + 1) ** 2\n\n\ndef f_5(x):\n return x * np.tan(x)\n\n\ndef f_6(x, a, b):\n return (1 + a * x + b * (x ** 2)) / ((2/3) * (b + 3)) \n\n\ndef f_7(x, a, b):\n return a + b * x\n\n\ndef f_8(x, a, b, c):\n return np.sin(a * x) + c * np.exp(b * x) + 1\n\n\ndef f_9(x, a, b):\n return np.exp(-(x - a) ** 2 / (2 * (b ** 2)))\n\ndef my_pdf(VAR, x):\n a = VAR\n\n pdf = f_1(x, a)\n\n ln_pdf = np.log((pdf))\n result = np.sum(-ln_pdf)\n return result\n\n\nfname = 'Exam_2018_Prob1.txt'\ndata = np.loadtxt(fname)\n\nz = data[:, 0]\n\na_bound = (-10, 0)\nb_bound = (-10, 10)\nc_bound = (4000, 8000)\n\nn_bound = (0, None)\np_bound = (0, None)\n\nmu_bound = (0, None)\n\ndata_0 = minimize(my_pdf, [1, ], args=(z), method='SLSQP',\n bounds=(a_bound, ))\n\n\nprint(data_0)\nx = np.arange(20, 27, 0.01)\ny = f_1(x, -3)\nplt.plot(x, y+0.2)\nplt.hist(z, bins=200, normed=True)\nplt.show()\nbinwidth = 0.1\nn_bins = np.arange(min(data[:, 2]), max(data[:, 2]) + binwidth, binwidth)\n\n# Chi2 calculator\n# observed_values, bins, _ = plt.hist(data[:, 2], bins=n_bins)\n\n# plt.show()\n# We normalize by multiplyting the length of the data with the binwidth\n# expected_values = poisson.pmf(bins, data_0.x[0]) * len(data) \n\n# print(observed_values[observed_values!=0])\n# print(expected_values[expected_values!=0])\n# print(chisquare(observed_values[observed_values!=0], f_exp=expected_values[expected_values!=0]))\n# print('Threshold value ', chi2.isf(0.05, 18))\n\n\n# x = np.arange(-1, 1, 0.01)\n# y = f_6(x, data_0.x[0], data_0.x[1]) \n# plt.plot(x,y)\n# plt.show()\n\n",
"step-ids": [
6,
9,
12,
13,
14
]
}
|
[
6,
9,
12,
13,
14
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if __name__ == '__main__':
scale = 768
bitmap = Image.new('RGB', (scale, scale), 'white')
pix = bitmap.load()
c = complex(-0.585, 0.85)
move = 0.0
maxIter = 255
for x in range(scale):
for y in range(scale):
zx = 1.5 * (x - scale / 2) / (0.5 * scale) + move
zy = 1.0 * (y - scale / 2) / (0.5 * scale) + move
z = complex(zx, zy)
i = maxIter
while abs(z * z) < 4 and i > 1:
z = z ** 2 + c
i -= 1
pix[x, y] = (i << 21) + (i << 10) + i * 8
bitmap.show()
<|reserved_special_token_1|>
from PIL import Image
if __name__ == '__main__':
scale = 768
bitmap = Image.new('RGB', (scale, scale), 'white')
pix = bitmap.load()
c = complex(-0.585, 0.85)
move = 0.0
maxIter = 255
for x in range(scale):
for y in range(scale):
zx = 1.5 * (x - scale / 2) / (0.5 * scale) + move
zy = 1.0 * (y - scale / 2) / (0.5 * scale) + move
z = complex(zx, zy)
i = maxIter
while abs(z * z) < 4 and i > 1:
z = z ** 2 + c
i -= 1
pix[x, y] = (i << 21) + (i << 10) + i * 8
bitmap.show()
<|reserved_special_token_1|>
#
#River Sheppard
#
#
from PIL import Image
if __name__ == "__main__":
scale = 768
# creating the new image in RGB mode
bitmap = Image.new("RGB", (scale, scale), "white")
# Allocating the storage for the image and
# loading the pixel data.
pix = bitmap.load()
# setting up the variables according to
# the equation to create the fractal
c = complex(-0.585, 0.85)
move = 0.0
maxIter = 255
for x in range(scale):
for y in range(scale):
zx = 1.5*(x - scale/2)/(0.5*scale) + move
zy = 1.0*(y - scale/2)/(0.5*scale) + move
z = complex(zx,zy)
i = maxIter
while abs(z*z) < 4 and i > 1:
z = z**2 + c
i -= 1
            # shift the remaining iteration count into several bit
            # positions of one packed integer; the exact shifts are
            # arbitrary but map different counts to distinct colours
pix[x,y] = (i << 21) + (i << 10) + i*8
# to display the created fractal
bitmap.show()
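    # optionally persist the image as well (the filename is arbitrary):
    # bitmap.save("julia.png")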
|
flexible
|
{
"blob_id": "507251113d80eaa3684081f7814470053b04dda9",
"index": 1436,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n scale = 768\n bitmap = Image.new('RGB', (scale, scale), 'white')\n pix = bitmap.load()\n c = complex(-0.585, 0.85)\n move = 0.0\n maxIter = 255\n for x in range(scale):\n for y in range(scale):\n zx = 1.5 * (x - scale / 2) / (0.5 * scale) + move\n zy = 1.0 * (y - scale / 2) / (0.5 * scale) + move\n z = complex(zx, zy)\n i = maxIter\n while abs(z * z) < 4 and i > 1:\n z = z ** 2 + c\n i -= 1\n pix[x, y] = (i << 21) + (i << 10) + i * 8\n bitmap.show()\n",
"step-3": "from PIL import Image\nif __name__ == '__main__':\n scale = 768\n bitmap = Image.new('RGB', (scale, scale), 'white')\n pix = bitmap.load()\n c = complex(-0.585, 0.85)\n move = 0.0\n maxIter = 255\n for x in range(scale):\n for y in range(scale):\n zx = 1.5 * (x - scale / 2) / (0.5 * scale) + move\n zy = 1.0 * (y - scale / 2) / (0.5 * scale) + move\n z = complex(zx, zy)\n i = maxIter\n while abs(z * z) < 4 and i > 1:\n z = z ** 2 + c\n i -= 1\n pix[x, y] = (i << 21) + (i << 10) + i * 8\n bitmap.show()\n",
"step-4": "#\r\n#River Sheppard\r\n#\r\n#\r\n\r\nfrom PIL import Image\r\n\r\nif __name__ == \"__main__\":\r\n scale = 768\r\n \r\n # creating the new image in RGB mode\r\n bitmap = Image.new(\"RGB\", (scale, scale), \"white\")\r\n \r\n # Allocating the storage for the image and\r\n # loading the pixel data.\r\n pix = bitmap.load()\r\n \r\n # setting up the variables according to \r\n # the equation to create the fractal\r\n c = complex(-0.585, 0.85)\r\n move = 0.0\r\n maxIter = 255\r\n \r\n for x in range(scale):\r\n for y in range(scale):\r\n zx = 1.5*(x - scale/2)/(0.5*scale) + move\r\n zy = 1.0*(y - scale/2)/(0.5*scale) + move\r\n z = complex(zx,zy)\r\n i = maxIter\r\n while abs(z*z) < 4 and i > 1:\r\n z = z**2 + c\r\n i -= 1\r\n \r\n # convert byte to RGB (3 bytes), kinda \r\n # magic to get nice colors\r\n pix[x,y] = (i << 21) + (i << 10) + i*8\r\n \r\n # to display the created fractal\r\n bitmap.show()\r\n \r\n \r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
You can perform the following operations on the string a:
Capitalize zero or more of a's lowercase letters.
Delete all of the remaining lowercase letters in a.
Given two strings, a and b, determine if it's possible to make a equal to b as described. If so, print YES on a new line. Otherwise, print NO.
For example, we can capitalize some of a's lowercase letters and delete the remaining ones so that a matches b; matching is not possible when a letter would have to change, because letters may only be capitalized or discarded, not changed.
Function Description
Complete the abbreviation function in the editor below. It must return either YES or NO.
abbreviation has the following parameter(s):
a: the string to modify
b: the string to match
Input Format
The first line contains a single integer q, the number of queries.
Each of the next q pairs of lines is as follows:
- The first line of each query contains a single string, a.
- The second line of each query contains a single string, b.
Constraints
String a consists only of uppercase and lowercase English letters, ascii[A-Za-z].
String b consists only of uppercase English letters, ascii[A-Z].
Output Format
For each query, print YES on a new line if it's possible to make string a equal to string b. Otherwise, print NO.
Sample Input
1
daBcd
ABC
Sample Output
YES
Explanation
We have a = daBcd and b = ABC. We perform the following operations:
Capitalize the letters a and c in a so that a = dABCd.
Delete all the remaining lowercase letters in a so that a = ABC.
Because we were able to successfully convert a to b, we print YES on a new line.
"""
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the abbreviation function below.
def abbreviation(a, b):
m, n = len(a), len(b)
dp = [[False]*(m+1) for _ in range(n+1)]
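    # dp[i][j] is True when the first j characters of a can be
    # transformed into the first i characters of b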
dp[0][0] = True
for i in range(n+1):
for j in range(1,m+1):
if a[j-1] == b[i-1]:
dp[i][j] = dp[i-1][j-1]
elif a[j-1].upper() == b[i-1]:
dp[i][j] = dp[i-1][j-1] or dp[i][j-1]
elif a[j-1].islower():
dp[i][j] = dp[i][j-1]
return "YES" if dp[n][m] else "NO"
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
q = int(input())
for q_itr in range(q):
a = input()
b = input()
result = abbreviation(a, b)
fptr.write(result + '\n')
fptr.close()
|
normal
|
{
"blob_id": "5fb998fa761b989c6dd423634824197bade4f8a5",
"index": 23,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef abbreviation(a, b):\n m, n = len(a), len(b)\n dp = [([False] * (m + 1)) for _ in range(n + 1)]\n dp[0][0] = True\n for i in range(n + 1):\n for j in range(1, m + 1):\n if a[j - 1] == b[i - 1]:\n dp[i][j] = dp[i - 1][j - 1]\n elif a[j - 1].upper() == b[i - 1]:\n dp[i][j] = dp[i - 1][j - 1] or dp[i][j - 1]\n elif a[j - 1].islower():\n dp[i][j] = dp[i][j - 1]\n return 'YES' if dp[n][m] else 'NO'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef abbreviation(a, b):\n m, n = len(a), len(b)\n dp = [([False] * (m + 1)) for _ in range(n + 1)]\n dp[0][0] = True\n for i in range(n + 1):\n for j in range(1, m + 1):\n if a[j - 1] == b[i - 1]:\n dp[i][j] = dp[i - 1][j - 1]\n elif a[j - 1].upper() == b[i - 1]:\n dp[i][j] = dp[i - 1][j - 1] or dp[i][j - 1]\n elif a[j - 1].islower():\n dp[i][j] = dp[i][j - 1]\n return 'YES' if dp[n][m] else 'NO'\n\n\nif __name__ == '__main__':\n fptr = open(os.environ['OUTPUT_PATH'], 'w')\n q = int(input())\n for q_itr in range(q):\n a = input()\n b = input()\n result = abbreviation(a, b)\n fptr.write(result + '\\n')\n fptr.close()\n",
"step-4": "<mask token>\nimport math\nimport os\nimport random\nimport re\nimport sys\n\n\ndef abbreviation(a, b):\n m, n = len(a), len(b)\n dp = [([False] * (m + 1)) for _ in range(n + 1)]\n dp[0][0] = True\n for i in range(n + 1):\n for j in range(1, m + 1):\n if a[j - 1] == b[i - 1]:\n dp[i][j] = dp[i - 1][j - 1]\n elif a[j - 1].upper() == b[i - 1]:\n dp[i][j] = dp[i - 1][j - 1] or dp[i][j - 1]\n elif a[j - 1].islower():\n dp[i][j] = dp[i][j - 1]\n return 'YES' if dp[n][m] else 'NO'\n\n\nif __name__ == '__main__':\n fptr = open(os.environ['OUTPUT_PATH'], 'w')\n q = int(input())\n for q_itr in range(q):\n a = input()\n b = input()\n result = abbreviation(a, b)\n fptr.write(result + '\\n')\n fptr.close()\n",
"step-5": "\"\"\"\nYou can perform the following operations on the string, :\n\nCapitalize zero or more of 's lowercase letters.\nDelete all of the remaining lowercase letters in .\nGiven two strings, and , determine if it's possible to make equal to as described. If so, print YES on a new line. Otherwise, print NO.\n\nFor example, given and , in we can convert and delete to match . If and , matching is not possible because letters may only be capitalized or discarded, not changed.\n\nFunction Description\n\nComplete the function in the editor below. It must return either or .\n\nabbreviation has the following parameter(s):\n\na: the string to modify\nb: the string to match\nInput Format\n\nThe first line contains a single integer , the number of queries.\n\nEach of the next pairs of lines is as follows:\n- The first line of each query contains a single string, .\n- The second line of each query contains a single string, .\n\nConstraints\n\nString consists only of uppercase and lowercase English letters, ascii[A-Za-z].\nString consists only of uppercase English letters, ascii[A-Z].\nOutput Format\n\nFor each query, print YES on a new line if it's possible to make string equal to string . Otherwise, print NO.\n\nSample Input\n\n1\ndaBcd\nABC\nSample Output\n\nYES\nExplanation\n\nimage\n\nWe have daBcd and ABC. We perform the following operation:\n\nCapitalize the letters a and c in so that dABCd.\nDelete all the remaining lowercase letters in so that ABC.\nBecause we were able to successfully convert to , we print YES on a new line.\n\n\n\"\"\"\n#!/bin/python3\n\nimport math\nimport os\nimport random\nimport re\nimport sys\n\n\n# Complete the abbreviation function below.\ndef abbreviation(a, b):\n m, n = len(a), len(b)\n dp = [[False]*(m+1) for _ in range(n+1)]\n dp[0][0] = True\n for i in range(n+1):\n for j in range(1,m+1):\n if a[j-1] == b[i-1]:\n dp[i][j] = dp[i-1][j-1]\n elif a[j-1].upper() == b[i-1]:\n dp[i][j] = dp[i-1][j-1] or dp[i][j-1]\n elif a[j-1].islower():\n dp[i][j] = dp[i][j-1]\n return \"YES\" if dp[n][m] else \"NO\"\n\n\nif __name__ == '__main__':\n fptr = open(os.environ['OUTPUT_PATH'], 'w')\n\n q = int(input())\n\n for q_itr in range(q):\n a = input()\n\n b = input()\n\n result = abbreviation(a, b)\n\n fptr.write(result + '\\n')\n\n fptr.close()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
print('Hello world! im in github')
<|reserved_special_token_1|>
print("Hello world! im in github")
|
flexible
|
{
"blob_id": "2db6f88b733c23063803c374d7a5b651e8443bd5",
"index": 6135,
"step-1": "<mask token>\n",
"step-2": "print('Hello world! im in github')\n",
"step-3": "print(\"Hello world! im in github\")\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def file_list(dir):
subdir_list = []
for item in os.listdir(dir):
fullpath = os.path.join(dir, item)
if os.path.isdir(fullpath):
subdir_list.append(fullpath)
else:
print(fullpath)
for d in subdir_list:
file_list(d)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def file_list(dir):
subdir_list = []
for item in os.listdir(dir):
fullpath = os.path.join(dir, item)
if os.path.isdir(fullpath):
subdir_list.append(fullpath)
else:
print(fullpath)
for d in subdir_list:
file_list(d)
file_list('D:\\Workspace\\test\\PythonProject')
<|reserved_special_token_1|>
import os
def file_list(dir):
subdir_list = []
for item in os.listdir(dir):
fullpath = os.path.join(dir, item)
if os.path.isdir(fullpath):
subdir_list.append(fullpath)
else:
print(fullpath)
for d in subdir_list:
file_list(d)
file_list('D:\\Workspace\\test\\PythonProject')
<|reserved_special_token_1|>
# Print list of files and directories
import os
def file_list(dir):
subdir_list = []
for item in os.listdir(dir):
fullpath = os.path.join(dir,item)
if os.path.isdir(fullpath):
subdir_list.append(fullpath)
else:
print(fullpath)
for d in subdir_list:
file_list(d)
file_list(r'D:\Workspace\test\PythonProject')
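# note: the standard library's os.walk walks the same tree without
# explicit recursion, e.g.:
# for root, dirs, files in os.walk(top):
#     for name in files:
#         print(os.path.join(root, name))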
|
flexible
|
{
"blob_id": "051544f41cc3c7d78210076cb9720866924ea2a1",
"index": 2942,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef file_list(dir):\n subdir_list = []\n for item in os.listdir(dir):\n fullpath = os.path.join(dir, item)\n if os.path.isdir(fullpath):\n subdir_list.append(fullpath)\n else:\n print(fullpath)\n for d in subdir_list:\n file_list(d)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef file_list(dir):\n subdir_list = []\n for item in os.listdir(dir):\n fullpath = os.path.join(dir, item)\n if os.path.isdir(fullpath):\n subdir_list.append(fullpath)\n else:\n print(fullpath)\n for d in subdir_list:\n file_list(d)\n\n\nfile_list('D:\\\\Workspace\\test\\\\PythonProject')\n",
"step-4": "import os\n\n\ndef file_list(dir):\n subdir_list = []\n for item in os.listdir(dir):\n fullpath = os.path.join(dir, item)\n if os.path.isdir(fullpath):\n subdir_list.append(fullpath)\n else:\n print(fullpath)\n for d in subdir_list:\n file_list(d)\n\n\nfile_list('D:\\\\Workspace\\test\\\\PythonProject')\n",
"step-5": "# Print list of files and directories\nimport os\n\ndef file_list(dir):\n subdir_list = []\n for item in os.listdir(dir):\n fullpath = os.path.join(dir,item)\n if os.path.isdir(fullpath):\n subdir_list.append(fullpath)\n else:\n print(fullpath)\n\n for d in subdir_list:\n file_list(d)\n\nfile_list('D:\\Workspace\\test\\PythonProject')\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
adict = {'name': 'bob', 'age': 23}
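# membership tests check keys, not values:
# 'bob' in adict  -> False ('bob' is a value)
# 'name' in adict -> True  ('name' is a key)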
print('bob' in adict)
print('name' in adict)
for key in adict:
print('%s:%s' % (key, adict[key]))
print('%(name)s:%(age)s' % adict)
|
normal
|
{
"blob_id": "aa4d872c6a529d8acf18f1c3b477bc1816ac2887",
"index": 575,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('bob' in adict)\nprint('name' in adict)\nfor key in adict:\n print('%s:%s' % (key, adict[key]))\nprint('%(name)s:%(age)s' % adict)\n",
"step-3": "adict = {'name': 'bob', 'age': 23}\nprint('bob' in adict)\nprint('name' in adict)\nfor key in adict:\n print('%s:%s' % (key, adict[key]))\nprint('%(name)s:%(age)s' % adict)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
team = input("Wymien wszystkich czlonkow swojego zespolu: ").split(",")
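# note: split(",") keeps any spaces around the names; member.strip() would trim them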
for member in team:
print("Hello, " + member)
|
normal
|
{
"blob_id": "5d3f7d74cf1cc2612d599c65393abed11181c981",
"index": 2300,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor member in team:\n print('Hello, ' + member)\n",
"step-3": "team = input('Wymien wszystkich czlonkow swojego zespolu: ').split(',')\nfor member in team:\n print('Hello, ' + member)\n",
"step-4": "team = input(\"Wymien wszystkich czlonkow swojego zespolu: \").split(\",\")\nfor member in team:\n print(\"Hello, \" + member)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.db import transaction
from django.contrib.auth.models import Group
from drf_yasg import openapi
from drf_yasg.utils import swagger_auto_schema
from rest_framework import status, mixins
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.viewsets import ReadOnlyModelViewSet, GenericViewSet
from rest_access_policy import AccessViewSetMixin
from .models import CustomUsuario, PasswordResetToken
from .serializers import (
GroupSerializer,
CustomUsuarioSerializer,
CustomUsuarioMudarPasswordSerializer,
CustomUsuarioMudarPasswordAposResetSerializer,
CustomUsuarioMudarEmailSerializer,
CustomUsuarioMudarGrupoSerializer,
CustomUsuarioMudarAtivacaoSerializer,
PasswordResetTokenSerializer
)
from .views_access_policies import GroupAccessPolicy, CustomUsuarioAccessPolicy, PasswordResetTokenAccessPolicy
class CustomUsuarioViewSet(AccessViewSetMixin,
mixins.CreateModelMixin,
mixins.RetrieveModelMixin,
mixins.ListModelMixin,
GenericViewSet):
"""
CustomUsuario ViewSet description:
    create: Create a user.
    retrieve: Retrieve a user.
    list: List users.
    ativar: Activate a user.
    desativar: Deactivate a user.
    mudar_password_apos_reset: Change the user's password after a reset was requested. As a consequence,
    the token that allowed the change is deactivated.
    mudar_password: Update the user's password.
    mudar_email: Update the user's e-mail address.
    mudar_grupo: Update the user's group(s).
"""
access_policy = CustomUsuarioAccessPolicy
serializer_class = CustomUsuarioSerializer
def get_queryset(self):
return CustomUsuario.objects.all().order_by('id')
def perform_create(self, serializer):
serializer.save(usuario_modificacao=self.request.user)
def perform_update(self, serializer):
serializer.save(usuario_modificacao=self.request.user)
@swagger_auto_schema(method='patch', manual_parameters=[openapi.Parameter('token',
openapi.IN_QUERY,
type=openapi.TYPE_STRING,
required=True)])
@transaction.atomic
@action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarPasswordAposResetSerializer)
def mudar_password_apos_reset(self, request, pk=None):
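        # @transaction.atomic (above) makes the token deactivation and the
        # password update commit or roll back together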
usuario = self.get_object()
try:
token = request.query_params['token']
except KeyError:
return Response({'status': 'Token não informado.'},
status=status.HTTP_400_BAD_REQUEST)
try:
token_instance = usuario.password_reset_tokens.get(token=token)
except PasswordResetToken.DoesNotExist:
return Response({'status': 'Token inválido.'},
status=status.HTTP_400_BAD_REQUEST)
serializer_token = PasswordResetTokenSerializer(token_instance,
data={'ativo': False},
partial=True)
if serializer_token.is_valid():
serializer_token.save()
else:
return Response(serializer_token.errors,
status=status.HTTP_400_BAD_REQUEST)
serializer_usuario = self.get_serializer(
usuario,
data=request.data,
partial=True
)
if serializer_usuario.is_valid():
serializer_usuario.save()
return Response({'status': 'A nova senha foi registrada.'},
status=status.HTTP_200_OK)
return Response(serializer_usuario.errors,
status=status.HTTP_400_BAD_REQUEST)
@action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarPasswordSerializer)
def mudar_password(self, request, pk=None):
usuario = self.get_object()
serializer = self.get_serializer(usuario,
data=request.data,
partial=True)
if serializer.is_valid():
serializer.save()
return Response({'status': 'A nova senha foi registrada.'},
status=status.HTTP_200_OK)
return Response(serializer.errors,
status=status.HTTP_400_BAD_REQUEST)
@action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarEmailSerializer)
def mudar_email(self, request, pk=None):
usuario = self.get_object()
if 'password' not in request.data:
return Response({'status': 'Para mudar o e-mail é necessário '
'informar a senha atual.'},
status=status.HTTP_400_BAD_REQUEST)
serializer = self.get_serializer(usuario, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
return Response({'status': 'O e-mail foi alterado com sucesso.'}, status=status.HTTP_200_OK)
return Response(serializer.errors,
status=status.HTTP_400_BAD_REQUEST)
@action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarGrupoSerializer)
def mudar_grupo(self, request, pk=None):
usuario = self.get_object()
serializer = self.get_serializer(usuario, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
return Response({'status': 'O grupo foi alterado com sucesso.'}, status=status.HTTP_200_OK)
return Response(serializer.errors,
status=status.HTTP_400_BAD_REQUEST)
@action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarAtivacaoSerializer)
def ativar(self, request, pk=None):
usuario = self.get_object()
serializer = self.get_serializer(
usuario,
data={'is_active': True},
partial=True
)
if serializer.is_valid():
serializer.save()
try:
usuario.perfil.ativo = True
usuario.perfil.save()
except Exception:
print("Não há perfil vinculado ao usuário.")
return Response({'status': 'Usuário ativado.'},
status=status.HTTP_200_OK)
return Response(serializer.errors,
status=status.HTTP_400_BAD_REQUEST)
@action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarAtivacaoSerializer)
def desativar(self, request, pk=None):
usuario = self.get_object()
serializer = self.get_serializer(
usuario,
data={'is_active': False},
partial=True
)
if serializer.is_valid():
serializer.save()
try:
usuario.perfil.ativo = False
usuario.perfil.save()
except Exception:
print("Não há perfil vinculado ao usuário.")
return Response({'status': 'Usuário desativado.'},
status=status.HTTP_200_OK)
return Response(serializer.errors,
status=status.HTTP_400_BAD_REQUEST)
class GroupViewSet(AccessViewSetMixin, ReadOnlyModelViewSet):
"""
Group ViewSet description:
    list: List groups.
    retrieve: Retrieve a group.
"""
access_policy = GroupAccessPolicy
serializer_class = GroupSerializer
def get_queryset(self):
return Group.objects.all().order_by('id')
class PasswordResetTokenViewSet(AccessViewSetMixin,
mixins.CreateModelMixin,
mixins.RetrieveModelMixin,
mixins.ListModelMixin,
GenericViewSet):
"""
Password Reset Token ViewSet description:
    create: Create a token.
    retrieve: Retrieve a token.
    list: List tokens.
"""
access_policy = PasswordResetTokenAccessPolicy
serializer_class = PasswordResetTokenSerializer
def get_queryset(self):
return PasswordResetToken.objects.all().order_by('id')
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
self.perform_create(serializer)
headers = self.get_success_headers(serializer.data)
return Response({'status': 'Token criado. E-mail enviado ao '
'usuário para criação de nova senha.'},
status=status.HTTP_201_CREATED, headers=headers)
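# A minimal sketch of how these viewsets could be exposed in a urls.py
# (the route prefixes below are assumptions, not taken from this project):
#
# from rest_framework.routers import DefaultRouter
# router = DefaultRouter()
# router.register('usuarios', CustomUsuarioViewSet, basename='usuario')
# router.register('grupos', GroupViewSet, basename='grupo')
# router.register('password-reset-tokens', PasswordResetTokenViewSet,
#                 basename='passwordresettoken')
# urlpatterns = router.urls
#
# basename is required here because the viewsets define get_queryset()
# instead of a queryset attribute, so the router cannot infer it.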
|
normal
|
{
"blob_id": "43b5936ca9368dcae8d41b44fd9dc927fe18c9bc",
"index": 8794,
"step-1": "<mask token>\n\n\nclass CustomUsuarioViewSet(AccessViewSetMixin, mixins.CreateModelMixin,\n mixins.RetrieveModelMixin, mixins.ListModelMixin, GenericViewSet):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def perform_update(self, serializer):\n serializer.save(usuario_modificacao=self.request.user)\n\n @swagger_auto_schema(method='patch', manual_parameters=[openapi.\n Parameter('token', openapi.IN_QUERY, type=openapi.TYPE_STRING,\n required=True)])\n @transaction.atomic\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarPasswordAposResetSerializer)\n def mudar_password_apos_reset(self, request, pk=None):\n usuario = self.get_object()\n try:\n token = request.query_params['token']\n except KeyError:\n return Response({'status': 'Token não informado.'}, status=\n status.HTTP_400_BAD_REQUEST)\n try:\n token_instance = usuario.password_reset_tokens.get(token=token)\n except PasswordResetToken.DoesNotExist:\n return Response({'status': 'Token inválido.'}, status=status.\n HTTP_400_BAD_REQUEST)\n serializer_token = PasswordResetTokenSerializer(token_instance,\n data={'ativo': False}, partial=True)\n if serializer_token.is_valid():\n serializer_token.save()\n else:\n return Response(serializer_token.errors, status=status.\n HTTP_400_BAD_REQUEST)\n serializer_usuario = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer_usuario.is_valid():\n serializer_usuario.save()\n return Response({'status': 'A nova senha foi registrada.'},\n status=status.HTTP_200_OK)\n return Response(serializer_usuario.errors, status=status.\n HTTP_400_BAD_REQUEST)\n <mask token>\n <mask token>\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarGrupoSerializer)\n def mudar_grupo(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response({'status': 'O grupo foi alterado com sucesso.'},\n status=status.HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n <mask token>\n <mask token>\n\n\nclass GroupViewSet(AccessViewSetMixin, ReadOnlyModelViewSet):\n \"\"\"\n Group ViewSet description:\n\n list: Listar grupos.\n retrieve: Consultar grupos.\n \"\"\"\n access_policy = GroupAccessPolicy\n serializer_class = GroupSerializer\n\n def get_queryset(self):\n return Group.objects.all().order_by('id')\n\n\nclass PasswordResetTokenViewSet(AccessViewSetMixin, mixins.CreateModelMixin,\n mixins.RetrieveModelMixin, mixins.ListModelMixin, GenericViewSet):\n \"\"\"\n Password Reset Token ViewSet description:\n\n create: Criar token.\n retrieve: Consultar token.\n list: Listar tokens.\n \"\"\"\n access_policy = PasswordResetTokenAccessPolicy\n serializer_class = PasswordResetTokenSerializer\n\n def get_queryset(self):\n return PasswordResetToken.objects.all().order_by('id')\n\n def create(self, request, *args, **kwargs):\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n self.perform_create(serializer)\n headers = self.get_success_headers(serializer.data)\n return Response({'status':\n 'Token criado. E-mail enviado ao usuário para criação de nova senha.'\n }, status=status.HTTP_201_CREATED, headers=headers)\n",
"step-2": "<mask token>\n\n\nclass CustomUsuarioViewSet(AccessViewSetMixin, mixins.CreateModelMixin,\n mixins.RetrieveModelMixin, mixins.ListModelMixin, GenericViewSet):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def perform_update(self, serializer):\n serializer.save(usuario_modificacao=self.request.user)\n\n @swagger_auto_schema(method='patch', manual_parameters=[openapi.\n Parameter('token', openapi.IN_QUERY, type=openapi.TYPE_STRING,\n required=True)])\n @transaction.atomic\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarPasswordAposResetSerializer)\n def mudar_password_apos_reset(self, request, pk=None):\n usuario = self.get_object()\n try:\n token = request.query_params['token']\n except KeyError:\n return Response({'status': 'Token não informado.'}, status=\n status.HTTP_400_BAD_REQUEST)\n try:\n token_instance = usuario.password_reset_tokens.get(token=token)\n except PasswordResetToken.DoesNotExist:\n return Response({'status': 'Token inválido.'}, status=status.\n HTTP_400_BAD_REQUEST)\n serializer_token = PasswordResetTokenSerializer(token_instance,\n data={'ativo': False}, partial=True)\n if serializer_token.is_valid():\n serializer_token.save()\n else:\n return Response(serializer_token.errors, status=status.\n HTTP_400_BAD_REQUEST)\n serializer_usuario = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer_usuario.is_valid():\n serializer_usuario.save()\n return Response({'status': 'A nova senha foi registrada.'},\n status=status.HTTP_200_OK)\n return Response(serializer_usuario.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarPasswordSerializer)\n def mudar_password(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response({'status': 'A nova senha foi registrada.'},\n status=status.HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n <mask token>\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarGrupoSerializer)\n def mudar_grupo(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response({'status': 'O grupo foi alterado com sucesso.'},\n status=status.HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n <mask token>\n <mask token>\n\n\nclass GroupViewSet(AccessViewSetMixin, ReadOnlyModelViewSet):\n \"\"\"\n Group ViewSet description:\n\n list: Listar grupos.\n retrieve: Consultar grupos.\n \"\"\"\n access_policy = GroupAccessPolicy\n serializer_class = GroupSerializer\n\n def get_queryset(self):\n return Group.objects.all().order_by('id')\n\n\nclass PasswordResetTokenViewSet(AccessViewSetMixin, mixins.CreateModelMixin,\n mixins.RetrieveModelMixin, mixins.ListModelMixin, GenericViewSet):\n \"\"\"\n Password Reset Token ViewSet description:\n\n create: Criar token.\n retrieve: Consultar token.\n list: Listar tokens.\n \"\"\"\n access_policy = PasswordResetTokenAccessPolicy\n serializer_class = PasswordResetTokenSerializer\n\n def get_queryset(self):\n return PasswordResetToken.objects.all().order_by('id')\n\n def create(self, request, *args, **kwargs):\n serializer = self.get_serializer(data=request.data)\n 
serializer.is_valid(raise_exception=True)\n self.perform_create(serializer)\n headers = self.get_success_headers(serializer.data)\n return Response({'status':\n 'Token criado. E-mail enviado ao usuário para criação de nova senha.'\n }, status=status.HTTP_201_CREATED, headers=headers)\n",
"step-3": "<mask token>\n\n\nclass CustomUsuarioViewSet(AccessViewSetMixin, mixins.CreateModelMixin,\n mixins.RetrieveModelMixin, mixins.ListModelMixin, GenericViewSet):\n <mask token>\n <mask token>\n <mask token>\n\n def get_queryset(self):\n return CustomUsuario.objects.all().order_by('id')\n\n def perform_create(self, serializer):\n serializer.save(usuario_modificacao=self.request.user)\n\n def perform_update(self, serializer):\n serializer.save(usuario_modificacao=self.request.user)\n\n @swagger_auto_schema(method='patch', manual_parameters=[openapi.\n Parameter('token', openapi.IN_QUERY, type=openapi.TYPE_STRING,\n required=True)])\n @transaction.atomic\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarPasswordAposResetSerializer)\n def mudar_password_apos_reset(self, request, pk=None):\n usuario = self.get_object()\n try:\n token = request.query_params['token']\n except KeyError:\n return Response({'status': 'Token não informado.'}, status=\n status.HTTP_400_BAD_REQUEST)\n try:\n token_instance = usuario.password_reset_tokens.get(token=token)\n except PasswordResetToken.DoesNotExist:\n return Response({'status': 'Token inválido.'}, status=status.\n HTTP_400_BAD_REQUEST)\n serializer_token = PasswordResetTokenSerializer(token_instance,\n data={'ativo': False}, partial=True)\n if serializer_token.is_valid():\n serializer_token.save()\n else:\n return Response(serializer_token.errors, status=status.\n HTTP_400_BAD_REQUEST)\n serializer_usuario = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer_usuario.is_valid():\n serializer_usuario.save()\n return Response({'status': 'A nova senha foi registrada.'},\n status=status.HTTP_200_OK)\n return Response(serializer_usuario.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarPasswordSerializer)\n def mudar_password(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response({'status': 'A nova senha foi registrada.'},\n status=status.HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarEmailSerializer)\n def mudar_email(self, request, pk=None):\n usuario = self.get_object()\n if 'password' not in request.data:\n return Response({'status':\n 'Para mudar o e-mail é necessário informar a senha atual.'},\n status=status.HTTP_400_BAD_REQUEST)\n serializer = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response({'status': 'O e-mail foi alterado com sucesso.'\n }, status=status.HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarGrupoSerializer)\n def mudar_grupo(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response({'status': 'O grupo foi alterado com sucesso.'},\n status=status.HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarAtivacaoSerializer)\n def ativar(self, request, pk=None):\n usuario = self.get_object()\n 
serializer = self.get_serializer(usuario, data={'is_active': True},\n partial=True)\n if serializer.is_valid():\n serializer.save()\n try:\n usuario.perfil.ativo = True\n usuario.perfil.save()\n except Exception:\n print('Não há perfil vinculado ao usuário.')\n return Response({'status': 'Usuário ativado.'}, status=status.\n HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarAtivacaoSerializer)\n def desativar(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario, data={'is_active': False},\n partial=True)\n if serializer.is_valid():\n serializer.save()\n try:\n usuario.perfil.ativo = False\n usuario.perfil.save()\n except Exception:\n print('Não há perfil vinculado ao usuário.')\n return Response({'status': 'Usuário desativado.'}, status=\n status.HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\nclass GroupViewSet(AccessViewSetMixin, ReadOnlyModelViewSet):\n \"\"\"\n Group ViewSet description:\n\n list: Listar grupos.\n retrieve: Consultar grupos.\n \"\"\"\n access_policy = GroupAccessPolicy\n serializer_class = GroupSerializer\n\n def get_queryset(self):\n return Group.objects.all().order_by('id')\n\n\nclass PasswordResetTokenViewSet(AccessViewSetMixin, mixins.CreateModelMixin,\n mixins.RetrieveModelMixin, mixins.ListModelMixin, GenericViewSet):\n \"\"\"\n Password Reset Token ViewSet description:\n\n create: Criar token.\n retrieve: Consultar token.\n list: Listar tokens.\n \"\"\"\n access_policy = PasswordResetTokenAccessPolicy\n serializer_class = PasswordResetTokenSerializer\n\n def get_queryset(self):\n return PasswordResetToken.objects.all().order_by('id')\n\n def create(self, request, *args, **kwargs):\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n self.perform_create(serializer)\n headers = self.get_success_headers(serializer.data)\n return Response({'status':\n 'Token criado. E-mail enviado ao usuário para criação de nova senha.'\n }, status=status.HTTP_201_CREATED, headers=headers)\n",
"step-4": "<mask token>\n\n\nclass CustomUsuarioViewSet(AccessViewSetMixin, mixins.CreateModelMixin,\n mixins.RetrieveModelMixin, mixins.ListModelMixin, GenericViewSet):\n \"\"\"\n CustomUsuario ViewSet description:\n\n create: Criar usuário.\n retrieve: Consultar usuário.\n list: Listar usuários.\n ativar: Ativar usuário.\n desativar: Desativar usuário.\n mudar_password_apos_reset: Mudar a password do usuário após a solicitação de resetá-la. Consequentemente,\n é desativado o token que permitiu a alteração.\n mudar_password: Atualiza a password do usuário.\n mudar_email: Atualiza o e-mail do usuário.\n mudar_grupo: Atualiza o(s) grupo(s) do usuário.\n \"\"\"\n access_policy = CustomUsuarioAccessPolicy\n serializer_class = CustomUsuarioSerializer\n\n def get_queryset(self):\n return CustomUsuario.objects.all().order_by('id')\n\n def perform_create(self, serializer):\n serializer.save(usuario_modificacao=self.request.user)\n\n def perform_update(self, serializer):\n serializer.save(usuario_modificacao=self.request.user)\n\n @swagger_auto_schema(method='patch', manual_parameters=[openapi.\n Parameter('token', openapi.IN_QUERY, type=openapi.TYPE_STRING,\n required=True)])\n @transaction.atomic\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarPasswordAposResetSerializer)\n def mudar_password_apos_reset(self, request, pk=None):\n usuario = self.get_object()\n try:\n token = request.query_params['token']\n except KeyError:\n return Response({'status': 'Token não informado.'}, status=\n status.HTTP_400_BAD_REQUEST)\n try:\n token_instance = usuario.password_reset_tokens.get(token=token)\n except PasswordResetToken.DoesNotExist:\n return Response({'status': 'Token inválido.'}, status=status.\n HTTP_400_BAD_REQUEST)\n serializer_token = PasswordResetTokenSerializer(token_instance,\n data={'ativo': False}, partial=True)\n if serializer_token.is_valid():\n serializer_token.save()\n else:\n return Response(serializer_token.errors, status=status.\n HTTP_400_BAD_REQUEST)\n serializer_usuario = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer_usuario.is_valid():\n serializer_usuario.save()\n return Response({'status': 'A nova senha foi registrada.'},\n status=status.HTTP_200_OK)\n return Response(serializer_usuario.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarPasswordSerializer)\n def mudar_password(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response({'status': 'A nova senha foi registrada.'},\n status=status.HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarEmailSerializer)\n def mudar_email(self, request, pk=None):\n usuario = self.get_object()\n if 'password' not in request.data:\n return Response({'status':\n 'Para mudar o e-mail é necessário informar a senha atual.'},\n status=status.HTTP_400_BAD_REQUEST)\n serializer = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response({'status': 'O e-mail foi alterado com sucesso.'\n }, status=status.HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=\n 
CustomUsuarioMudarGrupoSerializer)\n def mudar_grupo(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario, data=request.data,\n partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response({'status': 'O grupo foi alterado com sucesso.'},\n status=status.HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarAtivacaoSerializer)\n def ativar(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario, data={'is_active': True},\n partial=True)\n if serializer.is_valid():\n serializer.save()\n try:\n usuario.perfil.ativo = True\n usuario.perfil.save()\n except Exception:\n print('Não há perfil vinculado ao usuário.')\n return Response({'status': 'Usuário ativado.'}, status=status.\n HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=\n CustomUsuarioMudarAtivacaoSerializer)\n def desativar(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario, data={'is_active': False},\n partial=True)\n if serializer.is_valid():\n serializer.save()\n try:\n usuario.perfil.ativo = False\n usuario.perfil.save()\n except Exception:\n print('Não há perfil vinculado ao usuário.')\n return Response({'status': 'Usuário desativado.'}, status=\n status.HTTP_200_OK)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\nclass GroupViewSet(AccessViewSetMixin, ReadOnlyModelViewSet):\n \"\"\"\n Group ViewSet description:\n\n list: Listar grupos.\n retrieve: Consultar grupos.\n \"\"\"\n access_policy = GroupAccessPolicy\n serializer_class = GroupSerializer\n\n def get_queryset(self):\n return Group.objects.all().order_by('id')\n\n\nclass PasswordResetTokenViewSet(AccessViewSetMixin, mixins.CreateModelMixin,\n mixins.RetrieveModelMixin, mixins.ListModelMixin, GenericViewSet):\n \"\"\"\n Password Reset Token ViewSet description:\n\n create: Criar token.\n retrieve: Consultar token.\n list: Listar tokens.\n \"\"\"\n access_policy = PasswordResetTokenAccessPolicy\n serializer_class = PasswordResetTokenSerializer\n\n def get_queryset(self):\n return PasswordResetToken.objects.all().order_by('id')\n\n def create(self, request, *args, **kwargs):\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n self.perform_create(serializer)\n headers = self.get_success_headers(serializer.data)\n return Response({'status':\n 'Token criado. E-mail enviado ao usuário para criação de nova senha.'\n }, status=status.HTTP_201_CREATED, headers=headers)\n",
"step-5": "from django.db import transaction\nfrom django.contrib.auth.models import Group\nfrom drf_yasg import openapi\nfrom drf_yasg.utils import swagger_auto_schema\n\nfrom rest_framework import status, mixins\nfrom rest_framework.decorators import action\nfrom rest_framework.response import Response\nfrom rest_framework.viewsets import ReadOnlyModelViewSet, GenericViewSet\nfrom rest_access_policy import AccessViewSetMixin\n\nfrom .models import CustomUsuario, PasswordResetToken\nfrom .serializers import (\n GroupSerializer,\n CustomUsuarioSerializer,\n CustomUsuarioMudarPasswordSerializer,\n CustomUsuarioMudarPasswordAposResetSerializer,\n CustomUsuarioMudarEmailSerializer,\n CustomUsuarioMudarGrupoSerializer,\n CustomUsuarioMudarAtivacaoSerializer,\n PasswordResetTokenSerializer\n)\nfrom .views_access_policies import GroupAccessPolicy, CustomUsuarioAccessPolicy, PasswordResetTokenAccessPolicy\n\nclass CustomUsuarioViewSet(AccessViewSetMixin,\n mixins.CreateModelMixin,\n mixins.RetrieveModelMixin,\n mixins.ListModelMixin,\n GenericViewSet):\n \"\"\"\n CustomUsuario ViewSet description:\n\n create: Criar usuário.\n retrieve: Consultar usuário.\n list: Listar usuários.\n ativar: Ativar usuário.\n desativar: Desativar usuário.\n mudar_password_apos_reset: Mudar a password do usuário após a solicitação de resetá-la. Consequentemente,\n é desativado o token que permitiu a alteração.\n mudar_password: Atualiza a password do usuário.\n mudar_email: Atualiza o e-mail do usuário.\n mudar_grupo: Atualiza o(s) grupo(s) do usuário.\n \"\"\"\n access_policy = CustomUsuarioAccessPolicy\n serializer_class = CustomUsuarioSerializer\n\n def get_queryset(self):\n return CustomUsuario.objects.all().order_by('id')\n\n def perform_create(self, serializer):\n serializer.save(usuario_modificacao=self.request.user)\n\n def perform_update(self, serializer):\n serializer.save(usuario_modificacao=self.request.user)\n\n @swagger_auto_schema(method='patch', manual_parameters=[openapi.Parameter('token',\n openapi.IN_QUERY,\n type=openapi.TYPE_STRING,\n required=True)])\n @transaction.atomic\n @action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarPasswordAposResetSerializer)\n def mudar_password_apos_reset(self, request, pk=None):\n usuario = self.get_object()\n try:\n token = request.query_params['token']\n except KeyError:\n return Response({'status': 'Token não informado.'},\n status=status.HTTP_400_BAD_REQUEST)\n try:\n token_instance = usuario.password_reset_tokens.get(token=token)\n except PasswordResetToken.DoesNotExist:\n return Response({'status': 'Token inválido.'},\n status=status.HTTP_400_BAD_REQUEST)\n\n serializer_token = PasswordResetTokenSerializer(token_instance,\n data={'ativo': False},\n partial=True)\n if serializer_token.is_valid():\n serializer_token.save()\n else:\n return Response(serializer_token.errors,\n status=status.HTTP_400_BAD_REQUEST)\n\n serializer_usuario = self.get_serializer(\n usuario,\n data=request.data,\n partial=True\n )\n\n if serializer_usuario.is_valid():\n serializer_usuario.save()\n return Response({'status': 'A nova senha foi registrada.'},\n status=status.HTTP_200_OK)\n return Response(serializer_usuario.errors,\n status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarPasswordSerializer)\n def mudar_password(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario,\n data=request.data,\n partial=True)\n\n if serializer.is_valid():\n 
serializer.save()\n return Response({'status': 'A nova senha foi registrada.'},\n status=status.HTTP_200_OK)\n\n return Response(serializer.errors,\n status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarEmailSerializer)\n def mudar_email(self, request, pk=None):\n usuario = self.get_object()\n\n if 'password' not in request.data:\n return Response({'status': 'Para mudar o e-mail é necessário '\n 'informar a senha atual.'},\n status=status.HTTP_400_BAD_REQUEST)\n\n serializer = self.get_serializer(usuario, data=request.data, partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response({'status': 'O e-mail foi alterado com sucesso.'}, status=status.HTTP_200_OK)\n\n return Response(serializer.errors,\n status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarGrupoSerializer)\n def mudar_grupo(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(usuario, data=request.data, partial=True)\n if serializer.is_valid():\n serializer.save()\n return Response({'status': 'O grupo foi alterado com sucesso.'}, status=status.HTTP_200_OK)\n return Response(serializer.errors,\n status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarAtivacaoSerializer)\n def ativar(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(\n usuario,\n data={'is_active': True},\n partial=True\n )\n if serializer.is_valid():\n serializer.save()\n try:\n usuario.perfil.ativo = True\n usuario.perfil.save()\n except Exception:\n print(\"Não há perfil vinculado ao usuário.\")\n return Response({'status': 'Usuário ativado.'},\n status=status.HTTP_200_OK)\n\n return Response(serializer.errors,\n status=status.HTTP_400_BAD_REQUEST)\n\n @action(detail=True, methods=['patch'], serializer_class=CustomUsuarioMudarAtivacaoSerializer)\n def desativar(self, request, pk=None):\n usuario = self.get_object()\n serializer = self.get_serializer(\n usuario,\n data={'is_active': False},\n partial=True\n )\n if serializer.is_valid():\n serializer.save()\n try:\n usuario.perfil.ativo = False\n usuario.perfil.save()\n except Exception:\n print(\"Não há perfil vinculado ao usuário.\")\n return Response({'status': 'Usuário desativado.'},\n status=status.HTTP_200_OK)\n\n return Response(serializer.errors,\n status=status.HTTP_400_BAD_REQUEST)\n\n\nclass GroupViewSet(AccessViewSetMixin, ReadOnlyModelViewSet):\n \"\"\"\n Group ViewSet description:\n\n list: Listar grupos.\n retrieve: Consultar grupos.\n \"\"\"\n access_policy = GroupAccessPolicy\n serializer_class = GroupSerializer\n\n def get_queryset(self):\n return Group.objects.all().order_by('id')\n\n\nclass PasswordResetTokenViewSet(AccessViewSetMixin,\n mixins.CreateModelMixin,\n mixins.RetrieveModelMixin,\n mixins.ListModelMixin,\n GenericViewSet):\n \"\"\"\n Password Reset Token ViewSet description:\n\n create: Criar token.\n retrieve: Consultar token.\n list: Listar tokens.\n \"\"\"\n access_policy = PasswordResetTokenAccessPolicy\n serializer_class = PasswordResetTokenSerializer\n\n def get_queryset(self):\n return PasswordResetToken.objects.all().order_by('id')\n\n def create(self, request, *args, **kwargs):\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n self.perform_create(serializer)\n headers = self.get_success_headers(serializer.data)\n return Response({'status': 'Token 
criado. E-mail enviado ao '\n 'usuário para criação de nova senha.'},\n status=status.HTTP_201_CREATED, headers=headers)\n",
"step-ids": [
13,
14,
19,
21,
23
]
}
|
[
13,
14,
19,
21,
23
] |
import base64
import json
class BaseTestCloudAuth:
"""
Required
setup: initialize test case
teardown: del items for test
decode: check decoded token and assigned info
"""
ACCESS_TOKEN = ""
SCOPE_ACCESS_TOKEN = ""
ID_TOKEN = ""
TESTCLIENT = None
def assert_get_response(client, endpoint, token, status_code, detail=""):
    """GET `endpoint`, optionally with a Bearer token, and assert on status/detail."""
if token:
headers = {"authorization": f"Bearer {token}"}
else:
headers = {}
response = client.get(endpoint, headers=headers)
assert response.status_code == status_code, f"{response.json()}"
if detail:
assert response.json().get("detail", "") == detail
return response
def decode_token(token):
header, payload, *rest = token.split(".")
header += f"{'=' * (len(header) % 4)}"
payload += f"{'=' * (len(payload) % 4)}"
header = json.loads(base64.b64decode(header).decode())
payload = json.loads(base64.b64decode(payload).decode())
return header, payload, rest
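# --- Hedged usage sketch (not part of the original test helpers): builds a
# hypothetical unsigned JWT inline and checks that decode_token round-trips it.
def _demo_decode_token():
    segments = [json.dumps({"alg": "none"}), json.dumps({"sub": "user"})]
    token = ".".join(
        base64.urlsafe_b64encode(s.encode()).rstrip(b"=").decode() for s in segments
    ) + "."  # empty signature segment
    header, payload, rest = decode_token(token)
    assert header == {"alg": "none"} and payload == {"sub": "user"} and rest == [""]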
|
normal
|
{
"blob_id": "9a2b5b9b2b2f9532b5d0749147aca644c2ac26e3",
"index": 2878,
"step-1": "<mask token>\n\n\nclass BaseTestCloudAuth:\n \"\"\"\n Required\n setup: initialize test case\n teardown: del items for test\n decode: check decoded token and assigned info\n \"\"\"\n ACCESS_TOKEN = ''\n SCOPE_ACCESS_TOKEN = ''\n ID_TOKEN = ''\n TESTCLIENT = None\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass BaseTestCloudAuth:\n \"\"\"\n Required\n setup: initialize test case\n teardown: del items for test\n decode: check decoded token and assigned info\n \"\"\"\n ACCESS_TOKEN = ''\n SCOPE_ACCESS_TOKEN = ''\n ID_TOKEN = ''\n TESTCLIENT = None\n\n\n<mask token>\n\n\ndef decode_token(token):\n header, payload, *rest = token.split('.')\n header += f\"{'=' * (len(header) % 4)}\"\n payload += f\"{'=' * (len(payload) % 4)}\"\n header = json.loads(base64.b64decode(header).decode())\n payload = json.loads(base64.b64decode(payload).decode())\n return header, payload, rest\n",
"step-3": "<mask token>\n\n\nclass BaseTestCloudAuth:\n \"\"\"\n Required\n setup: initialize test case\n teardown: del items for test\n decode: check decoded token and assigned info\n \"\"\"\n ACCESS_TOKEN = ''\n SCOPE_ACCESS_TOKEN = ''\n ID_TOKEN = ''\n TESTCLIENT = None\n\n\ndef assert_get_response(client, endpoint, token, status_code, detail=''):\n if token:\n headers = {'authorization': f'Bearer {token}'}\n else:\n headers = {}\n response = client.get(endpoint, headers=headers)\n assert response.status_code == status_code, f'{response.json()}'\n if detail:\n assert response.json().get('detail', '') == detail\n return response\n\n\ndef decode_token(token):\n header, payload, *rest = token.split('.')\n header += f\"{'=' * (len(header) % 4)}\"\n payload += f\"{'=' * (len(payload) % 4)}\"\n header = json.loads(base64.b64decode(header).decode())\n payload = json.loads(base64.b64decode(payload).decode())\n return header, payload, rest\n",
"step-4": "import base64\nimport json\n\n\nclass BaseTestCloudAuth:\n \"\"\"\n Required\n setup: initialize test case\n teardown: del items for test\n decode: check decoded token and assigned info\n \"\"\"\n ACCESS_TOKEN = ''\n SCOPE_ACCESS_TOKEN = ''\n ID_TOKEN = ''\n TESTCLIENT = None\n\n\ndef assert_get_response(client, endpoint, token, status_code, detail=''):\n if token:\n headers = {'authorization': f'Bearer {token}'}\n else:\n headers = {}\n response = client.get(endpoint, headers=headers)\n assert response.status_code == status_code, f'{response.json()}'\n if detail:\n assert response.json().get('detail', '') == detail\n return response\n\n\ndef decode_token(token):\n header, payload, *rest = token.split('.')\n header += f\"{'=' * (len(header) % 4)}\"\n payload += f\"{'=' * (len(payload) % 4)}\"\n header = json.loads(base64.b64decode(header).decode())\n payload = json.loads(base64.b64decode(payload).decode())\n return header, payload, rest\n",
"step-5": "import base64\nimport json\n\n\nclass BaseTestCloudAuth:\n \"\"\"\n Required\n setup: initialize test case\n teardown: del items for test\n decode: check decoded token and assigned info\n \"\"\"\n\n ACCESS_TOKEN = \"\"\n SCOPE_ACCESS_TOKEN = \"\"\n ID_TOKEN = \"\"\n TESTCLIENT = None\n\n\ndef assert_get_response(client, endpoint, token, status_code, detail=\"\"):\n if token:\n headers = {\"authorization\": f\"Bearer {token}\"}\n else:\n headers = {}\n response = client.get(endpoint, headers=headers)\n assert response.status_code == status_code, f\"{response.json()}\"\n if detail:\n assert response.json().get(\"detail\", \"\") == detail\n return response\n\n\ndef decode_token(token):\n header, payload, *rest = token.split(\".\")\n\n header += f\"{'=' * (len(header) % 4)}\"\n payload += f\"{'=' * (len(payload) % 4)}\"\n header = json.loads(base64.b64decode(header).decode())\n payload = json.loads(base64.b64decode(payload).decode())\n return header, payload, rest\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for file_dir in file_dirs:
fdir = 'custom_dataset/' + file_dir
for directory in os.listdir(fdir):
new_location = ('/content/gdrive/My Drive/project/custom_dataset/' +
file_dir + directory)
xml_files = glob.glob(fdir + directory + '/*.xml')
print(fdir + directory + '/' + '*.xml')
print(len(xml_files))
for xml_file in xml_files:
tree = ET.parse(xml_file)
root = tree.getroot()
filename = root.find('filename')
name = root[1].text
if name[0] == '\\':
name = name[1:]
root[1].text = name
frame_name = root[2].text[root[2].text.rfind('/') + 1:]
root[2].text = new_location + '/' + frame_name
tree.write(fdir + directory + '/' + frame_name[:-4] + '.xml')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
file_dirs = ['train/', 'test/']
for file_dir in file_dirs:
fdir = 'custom_dataset/' + file_dir
for directory in os.listdir(fdir):
new_location = ('/content/gdrive/My Drive/project/custom_dataset/' +
file_dir + directory)
xml_files = glob.glob(fdir + directory + '/*.xml')
print(fdir + directory + '/' + '*.xml')
print(len(xml_files))
for xml_file in xml_files:
tree = ET.parse(xml_file)
root = tree.getroot()
filename = root.find('filename')
name = root[1].text
if name[0] == '\\':
name = name[1:]
root[1].text = name
frame_name = root[2].text[root[2].text.rfind('/') + 1:]
root[2].text = new_location + '/' + frame_name
tree.write(fdir + directory + '/' + frame_name[:-4] + '.xml')
<|reserved_special_token_1|>
import glob
import os
import xml.etree.ElementTree as ET
file_dirs = ['train/', 'test/']
for file_dir in file_dirs:
fdir = 'custom_dataset/' + file_dir
for directory in os.listdir(fdir):
new_location = ('/content/gdrive/My Drive/project/custom_dataset/' +
file_dir + directory)
xml_files = glob.glob(fdir + directory + '/*.xml')
print(fdir + directory + '/' + '*.xml')
print(len(xml_files))
for xml_file in xml_files:
tree = ET.parse(xml_file)
root = tree.getroot()
filename = root.find('filename')
name = root[1].text
if name[0] == '\\':
name = name[1:]
root[1].text = name
frame_name = root[2].text[root[2].text.rfind('/') + 1:]
root[2].text = new_location + '/' + frame_name
tree.write(fdir + directory + '/' + frame_name[:-4] + '.xml')
<|reserved_special_token_1|>
import glob
import os
import xml.etree.ElementTree as ET
file_dirs = ["train/","test/"]
for file_dir in file_dirs:
fdir = "custom_dataset/" + file_dir
for directory in os.listdir(fdir):
new_location = "/content/gdrive/My Drive/project/custom_dataset/" + file_dir + directory
xml_files = glob.glob(fdir + directory + "/*.xml")
        print(fdir + directory + "/*.xml")
print(len(xml_files))
for xml_file in xml_files:
tree = ET.parse(xml_file)
root = tree.getroot()
filename = root.find('filename')
name = root[1].text
if name[0] == '\\':
name = name[1:]
root[1].text = name
frame_name = root[2].text[root[2].text.rfind('/') +1:]
root[2].text = (new_location + "/" + frame_name)
tree.write(fdir + directory + "/" + frame_name[:-4] + '.xml')
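# --- Hedged illustration (not from the original script; assumes the minimal
# annotation layout root[1]=filename, root[2]=path that the loop above relies on):
def _demo_rewrite():
    xml = ("<annotation><folder>vid0</folder>"
           "<filename>\\frame1.jpg</filename>"
           "<path>/old/location/frame1.jpg</path></annotation>")
    root = ET.fromstring(xml)
    name = root[1].text
    if name[0] == '\\':
        root[1].text = name[1:]  # drop the stray leading backslash
    frame_name = root[2].text[root[2].text.rfind('/') + 1:]
    root[2].text = "/content/gdrive/My Drive/project/custom_dataset/train/vid0/" + frame_name
    assert root[1].text == "frame1.jpg" and root[2].text.endswith("/frame1.jpg")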
|
flexible
|
{
"blob_id": "3a053c2c8a2b9123974183e65914dc0f73d2e078",
"index": 6368,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor file_dir in file_dirs:\n fdir = 'custom_dataset/' + file_dir\n for directory in os.listdir(fdir):\n new_location = ('/content/gdrive/My Drive/project/custom_dataset/' +\n file_dir + directory)\n xml_files = glob.glob(fdir + directory + '/*.xml')\n print(fdir + directory + '/' + '*.xml')\n print(len(xml_files))\n for xml_file in xml_files:\n tree = ET.parse(xml_file)\n root = tree.getroot()\n filename = root.find('filename')\n name = root[1].text\n if name[0] == '\\\\':\n name = name[1:]\n root[1].text = name\n frame_name = root[2].text[root[2].text.rfind('/') + 1:]\n root[2].text = new_location + '/' + frame_name\n tree.write(fdir + directory + '/' + frame_name[:-4] + '.xml')\n",
"step-3": "<mask token>\nfile_dirs = ['train/', 'test/']\nfor file_dir in file_dirs:\n fdir = 'custom_dataset/' + file_dir\n for directory in os.listdir(fdir):\n new_location = ('/content/gdrive/My Drive/project/custom_dataset/' +\n file_dir + directory)\n xml_files = glob.glob(fdir + directory + '/*.xml')\n print(fdir + directory + '/' + '*.xml')\n print(len(xml_files))\n for xml_file in xml_files:\n tree = ET.parse(xml_file)\n root = tree.getroot()\n filename = root.find('filename')\n name = root[1].text\n if name[0] == '\\\\':\n name = name[1:]\n root[1].text = name\n frame_name = root[2].text[root[2].text.rfind('/') + 1:]\n root[2].text = new_location + '/' + frame_name\n tree.write(fdir + directory + '/' + frame_name[:-4] + '.xml')\n",
"step-4": "import glob\nimport os\nimport xml.etree.ElementTree as ET\nfile_dirs = ['train/', 'test/']\nfor file_dir in file_dirs:\n fdir = 'custom_dataset/' + file_dir\n for directory in os.listdir(fdir):\n new_location = ('/content/gdrive/My Drive/project/custom_dataset/' +\n file_dir + directory)\n xml_files = glob.glob(fdir + directory + '/*.xml')\n print(fdir + directory + '/' + '*.xml')\n print(len(xml_files))\n for xml_file in xml_files:\n tree = ET.parse(xml_file)\n root = tree.getroot()\n filename = root.find('filename')\n name = root[1].text\n if name[0] == '\\\\':\n name = name[1:]\n root[1].text = name\n frame_name = root[2].text[root[2].text.rfind('/') + 1:]\n root[2].text = new_location + '/' + frame_name\n tree.write(fdir + directory + '/' + frame_name[:-4] + '.xml')\n",
"step-5": "import glob\nimport os\nimport xml.etree.ElementTree as ET\n\nfile_dirs = [\"train/\",\"test/\"]\n\nfor file_dir in file_dirs:\n fdir = \"custom_dataset/\" + file_dir\n for directory in os.listdir(fdir):\n \n new_location = \"/content/gdrive/My Drive/project/custom_dataset/\" + file_dir + directory \n xml_files = glob.glob(fdir + directory + \"/*.xml\")\n print((fdir + directory + \"/\" + \"*.xml\"))\n print(len(xml_files))\n for xml_file in xml_files: \n tree = ET.parse(xml_file)\n root = tree.getroot()\n filename = root.find('filename')\n name = root[1].text \n if name[0] == '\\\\': \n name = name[1:]\n root[1].text = name\n frame_name = root[2].text[root[2].text.rfind('/') +1:]\n root[2].text = (new_location + \"/\" + frame_name)\n tree.write(fdir + directory + \"/\" + frame_name[:-4] + '.xml')",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def DiceLoss(pred, target, smooth=2):
index = (2 * torch.sum(pred * target) + smooth) / (torch.sum(pred) +
torch.sum(target) + smooth)
return 1 - index
<|reserved_special_token_1|>
import torch
def DiceLoss(pred, target, smooth=2):
index = (2 * torch.sum(pred * target) + smooth) / (torch.sum(pred) +
torch.sum(target) + smooth)
return 1 - index
<|reserved_special_token_1|>
import torch
def DiceLoss(pred, target, smooth=2):
    """Soft Dice loss: 1 - (2*sum(pred*target) + smooth) / (sum(pred) + sum(target) + smooth)."""
    index = (2 * torch.sum(pred * target) + smooth) / (torch.sum(pred) + torch.sum(target) + smooth)
    return 1 - index
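# --- Hedged usage sketch (assumes pred/target are same-shape soft masks in [0, 1]):
def _demo_dice():
    t = torch.ones(4, 1, 8, 8)
    assert DiceLoss(t, t).item() < 1e-6                    # perfect overlap -> ~0
    assert DiceLoss(torch.zeros_like(t), t).item() > 0.99  # no overlap -> ~1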
|
flexible
|
{
"blob_id": "0aa0fcbb0ec1272bea93574a9287de9f526539c8",
"index": 3119,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef DiceLoss(pred, target, smooth=2):\n index = (2 * torch.sum(pred * target) + smooth) / (torch.sum(pred) +\n torch.sum(target) + smooth)\n return 1 - index\n",
"step-3": "import torch\n\n\ndef DiceLoss(pred, target, smooth=2):\n index = (2 * torch.sum(pred * target) + smooth) / (torch.sum(pred) +\n torch.sum(target) + smooth)\n return 1 - index\n",
"step-4": "import torch\ndef DiceLoss(pred,target,smooth=2):\n # print(\"pred shape: \",pred.shape)\n # print(\"target shape: \",target.shape)\n index = (2*torch.sum(pred*target)+smooth)/(torch.sum(pred)+torch.sum(target)+smooth)\n #if torch.sum(target).item() == 0:\n #print(\"instersection: \",torch.sum(pred*target).item())\n # print(\"pred: \",torch.sum(pred).item())\n # print(\"target: \",torch.sum(target).item())\n #print(\"Index: \", index.item())\n return 1-index",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def git_short_hash():
try:
git_str = '+' + os.popen('git log -1 --format="%h"').read().strip()
except:
git_str = ''
else:
if git_str == '+':
git_str = ''
return git_str
<|reserved_special_token_0|>
def write_version_py(filename='ipyhi/version.py'):
cnt = """# THIS FILE IS GENERATED FROM IPYHI SETUP.PY
# pylint: disable=missing-module-docstring
short_version = '%(version)s'
version = '%(fullversion)s'
release = %(isrelease)s
"""
a = open(filename, 'w')
try:
a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,
'isrelease': str(ISRELEASED)})
finally:
a.close()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def git_short_hash():
try:
git_str = '+' + os.popen('git log -1 --format="%h"').read().strip()
except:
git_str = ''
else:
if git_str == '+':
git_str = ''
return git_str
if not ISRELEASED:
FULLVERSION += '.dev' + str(MICRO) + git_short_hash()
def write_version_py(filename='ipyhi/version.py'):
cnt = """# THIS FILE IS GENERATED FROM IPYHI SETUP.PY
# pylint: disable=missing-module-docstring
short_version = '%(version)s'
version = '%(fullversion)s'
release = %(isrelease)s
"""
a = open(filename, 'w')
try:
a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,
'isrelease': str(ISRELEASED)})
finally:
a.close()
setup(name='ipyhi', version=VERSION, description=DESCRIPTION,
long_description=LONG_DESCRIPTION, author='Paul Nation', author_email=
'[email protected]', url='https://github.com/nonhermitian/ipyhi',
license='Apache-2', packages=find_packages(exclude=('tests', 'docs')),
package_data={'ipyhi': ['js/*.js']}, install_requires=['ipython',
'jupyter', 'ipywidgets'], classifiers=[
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9'])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
MAJOR = 0
MINOR = 1
MICRO = 0
ISRELEASED = False
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
FULLVERSION = VERSION
DOCLINES = __doc__.split('\n')
DESCRIPTION = DOCLINES[0]
LONG_DESCRIPTION = '\n'.join(DOCLINES[2:])
def git_short_hash():
try:
git_str = '+' + os.popen('git log -1 --format="%h"').read().strip()
except:
git_str = ''
else:
if git_str == '+':
git_str = ''
return git_str
if not ISRELEASED:
FULLVERSION += '.dev' + str(MICRO) + git_short_hash()
def write_version_py(filename='ipyhi/version.py'):
cnt = """# THIS FILE IS GENERATED FROM IPYHI SETUP.PY
# pylint: disable=missing-module-docstring
short_version = '%(version)s'
version = '%(fullversion)s'
release = %(isrelease)s
"""
a = open(filename, 'w')
try:
a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,
'isrelease': str(ISRELEASED)})
finally:
a.close()
setup(name='ipyhi', version=VERSION, description=DESCRIPTION,
long_description=LONG_DESCRIPTION, author='Paul Nation', author_email=
'[email protected]', url='https://github.com/nonhermitian/ipyhi',
license='Apache-2', packages=find_packages(exclude=('tests', 'docs')),
package_data={'ipyhi': ['js/*.js']}, install_requires=['ipython',
'jupyter', 'ipywidgets'], classifiers=[
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9'])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import os
from setuptools import find_packages, setup
MAJOR = 0
MINOR = 1
MICRO = 0
ISRELEASED = False
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
FULLVERSION = VERSION
DOCLINES = __doc__.split('\n')
DESCRIPTION = DOCLINES[0]
LONG_DESCRIPTION = '\n'.join(DOCLINES[2:])
def git_short_hash():
try:
git_str = '+' + os.popen('git log -1 --format="%h"').read().strip()
except:
git_str = ''
else:
if git_str == '+':
git_str = ''
return git_str
if not ISRELEASED:
FULLVERSION += '.dev' + str(MICRO) + git_short_hash()
def write_version_py(filename='ipyhi/version.py'):
cnt = """# THIS FILE IS GENERATED FROM IPYHI SETUP.PY
# pylint: disable=missing-module-docstring
short_version = '%(version)s'
version = '%(fullversion)s'
release = %(isrelease)s
"""
a = open(filename, 'w')
try:
a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,
'isrelease': str(ISRELEASED)})
finally:
a.close()
setup(name='ipyhi', version=VERSION, description=DESCRIPTION,
long_description=LONG_DESCRIPTION, author='Paul Nation', author_email=
'[email protected]', url='https://github.com/nonhermitian/ipyhi',
license='Apache-2', packages=find_packages(exclude=('tests', 'docs')),
package_data={'ipyhi': ['js/*.js']}, install_requires=['ipython',
'jupyter', 'ipywidgets'], classifiers=[
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9'])
<|reserved_special_token_1|>
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""ipyhi
ipyhi is a Jupyter notebook notification system.
It is based on the jupyter-notify package.
"""
import os
from setuptools import find_packages, setup
MAJOR = 0
MINOR = 1
MICRO = 0
ISRELEASED = False
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
FULLVERSION = VERSION
DOCLINES = __doc__.split('\n')
DESCRIPTION = DOCLINES[0]
LONG_DESCRIPTION = "\n".join(DOCLINES[2:])
def git_short_hash():
try:
git_str = "+" + os.popen('git log -1 --format="%h"').read().strip()
except: # pylint: disable=bare-except
git_str = ""
else:
if git_str == '+': #fixes setuptools PEP issues with versioning
git_str = ''
return git_str
if not ISRELEASED:
FULLVERSION += '.dev'+str(MICRO)+git_short_hash()
def write_version_py(filename='ipyhi/version.py'):
cnt = """\
# THIS FILE IS GENERATED FROM IPYHI SETUP.PY
# pylint: disable=missing-module-docstring
short_version = '%(version)s'
version = '%(fullversion)s'
release = %(isrelease)s
"""
a = open(filename, 'w')
try:
a.write(cnt % {'version': VERSION, 'fullversion':
FULLVERSION, 'isrelease': str(ISRELEASED)})
finally:
a.close()
setup(
name='ipyhi',
version=VERSION,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author='Paul Nation',
author_email='[email protected]',
url='https://github.com/nonhermitian/ipyhi',
license='Apache-2',
packages=find_packages(exclude=('tests', 'docs')),
package_data={'ipyhi': ['js/*.js']},
install_requires=[
'ipython',
'jupyter',
'ipywidgets'
],
classifiers=[
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9'
]
)
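# --- Hedged illustration (not invoked by the build; restates how the
# pre-release version string above is composed, e.g. '0.1.0.dev0+a1b2c3d'):
def _expected_dev_version():
    return '%d.%d.%d.dev%d%s' % (MAJOR, MINOR, MICRO, MICRO, git_short_hash())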
|
flexible
|
{
"blob_id": "2e2de50a7d366ca1a98d29b33ed157a1e8445ada",
"index": 3523,
"step-1": "<mask token>\n\n\ndef git_short_hash():\n try:\n git_str = '+' + os.popen('git log -1 --format=\"%h\"').read().strip()\n except:\n git_str = ''\n else:\n if git_str == '+':\n git_str = ''\n return git_str\n\n\n<mask token>\n\n\ndef write_version_py(filename='ipyhi/version.py'):\n cnt = \"\"\"# THIS FILE IS GENERATED FROM IPYHI SETUP.PY\n# pylint: disable=missing-module-docstring\nshort_version = '%(version)s'\nversion = '%(fullversion)s'\nrelease = %(isrelease)s\n\"\"\"\n a = open(filename, 'w')\n try:\n a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,\n 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef git_short_hash():\n try:\n git_str = '+' + os.popen('git log -1 --format=\"%h\"').read().strip()\n except:\n git_str = ''\n else:\n if git_str == '+':\n git_str = ''\n return git_str\n\n\nif not ISRELEASED:\n FULLVERSION += '.dev' + str(MICRO) + git_short_hash()\n\n\ndef write_version_py(filename='ipyhi/version.py'):\n cnt = \"\"\"# THIS FILE IS GENERATED FROM IPYHI SETUP.PY\n# pylint: disable=missing-module-docstring\nshort_version = '%(version)s'\nversion = '%(fullversion)s'\nrelease = %(isrelease)s\n\"\"\"\n a = open(filename, 'w')\n try:\n a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,\n 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\n\nsetup(name='ipyhi', version=VERSION, description=DESCRIPTION,\n long_description=LONG_DESCRIPTION, author='Paul Nation', author_email=\n '[email protected]', url='https://github.com/nonhermitian/ipyhi',\n license='Apache-2', packages=find_packages(exclude=('tests', 'docs')),\n package_data={'ipyhi': ['js/*.js']}, install_requires=['ipython',\n 'jupyter', 'ipywidgets'], classifiers=[\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9'])\n",
"step-3": "<mask token>\nMAJOR = 0\nMINOR = 1\nMICRO = 0\nISRELEASED = False\nVERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)\nFULLVERSION = VERSION\nDOCLINES = __doc__.split('\\n')\nDESCRIPTION = DOCLINES[0]\nLONG_DESCRIPTION = '\\n'.join(DOCLINES[2:])\n\n\ndef git_short_hash():\n try:\n git_str = '+' + os.popen('git log -1 --format=\"%h\"').read().strip()\n except:\n git_str = ''\n else:\n if git_str == '+':\n git_str = ''\n return git_str\n\n\nif not ISRELEASED:\n FULLVERSION += '.dev' + str(MICRO) + git_short_hash()\n\n\ndef write_version_py(filename='ipyhi/version.py'):\n cnt = \"\"\"# THIS FILE IS GENERATED FROM IPYHI SETUP.PY\n# pylint: disable=missing-module-docstring\nshort_version = '%(version)s'\nversion = '%(fullversion)s'\nrelease = %(isrelease)s\n\"\"\"\n a = open(filename, 'w')\n try:\n a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,\n 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\n\nsetup(name='ipyhi', version=VERSION, description=DESCRIPTION,\n long_description=LONG_DESCRIPTION, author='Paul Nation', author_email=\n '[email protected]', url='https://github.com/nonhermitian/ipyhi',\n license='Apache-2', packages=find_packages(exclude=('tests', 'docs')),\n package_data={'ipyhi': ['js/*.js']}, install_requires=['ipython',\n 'jupyter', 'ipywidgets'], classifiers=[\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9'])\n",
"step-4": "<mask token>\nimport os\nfrom setuptools import find_packages, setup\nMAJOR = 0\nMINOR = 1\nMICRO = 0\nISRELEASED = False\nVERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)\nFULLVERSION = VERSION\nDOCLINES = __doc__.split('\\n')\nDESCRIPTION = DOCLINES[0]\nLONG_DESCRIPTION = '\\n'.join(DOCLINES[2:])\n\n\ndef git_short_hash():\n try:\n git_str = '+' + os.popen('git log -1 --format=\"%h\"').read().strip()\n except:\n git_str = ''\n else:\n if git_str == '+':\n git_str = ''\n return git_str\n\n\nif not ISRELEASED:\n FULLVERSION += '.dev' + str(MICRO) + git_short_hash()\n\n\ndef write_version_py(filename='ipyhi/version.py'):\n cnt = \"\"\"# THIS FILE IS GENERATED FROM IPYHI SETUP.PY\n# pylint: disable=missing-module-docstring\nshort_version = '%(version)s'\nversion = '%(fullversion)s'\nrelease = %(isrelease)s\n\"\"\"\n a = open(filename, 'w')\n try:\n a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,\n 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\n\nsetup(name='ipyhi', version=VERSION, description=DESCRIPTION,\n long_description=LONG_DESCRIPTION, author='Paul Nation', author_email=\n '[email protected]', url='https://github.com/nonhermitian/ipyhi',\n license='Apache-2', packages=find_packages(exclude=('tests', 'docs')),\n package_data={'ipyhi': ['js/*.js']}, install_requires=['ipython',\n 'jupyter', 'ipywidgets'], classifiers=[\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9'])\n",
"step-5": "# (C) Copyright IBM 2020.\n#\n# This code is licensed under the Apache License, Version 2.0. You may\n# obtain a copy of this license in the LICENSE.txt file in the root directory\n# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.\n#\n# Any modifications or derivative works of this code must retain this\n# copyright notice, and modified files need to carry a notice indicating\n# that they have been altered from the originals.\n\"\"\"ipyhi\n\nipyhi is a Jupyter notebook notification system.\nIt is based on the jupyter-notify package.\n\"\"\"\nimport os\nfrom setuptools import find_packages, setup\n\nMAJOR = 0\nMINOR = 1\nMICRO = 0\n\nISRELEASED = False\nVERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)\nFULLVERSION = VERSION\n\nDOCLINES = __doc__.split('\\n')\nDESCRIPTION = DOCLINES[0]\nLONG_DESCRIPTION = \"\\n\".join(DOCLINES[2:])\n\ndef git_short_hash():\n try:\n git_str = \"+\" + os.popen('git log -1 --format=\"%h\"').read().strip()\n except: # pylint: disable=bare-except\n git_str = \"\"\n else:\n if git_str == '+': #fixes setuptools PEP issues with versioning\n git_str = ''\n return git_str\n\nif not ISRELEASED:\n FULLVERSION += '.dev'+str(MICRO)+git_short_hash()\n\ndef write_version_py(filename='ipyhi/version.py'):\n cnt = \"\"\"\\\n# THIS FILE IS GENERATED FROM IPYHI SETUP.PY\n# pylint: disable=missing-module-docstring\nshort_version = '%(version)s'\nversion = '%(fullversion)s'\nrelease = %(isrelease)s\n\"\"\"\n a = open(filename, 'w')\n try:\n a.write(cnt % {'version': VERSION, 'fullversion':\n FULLVERSION, 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\nsetup(\n name='ipyhi',\n version=VERSION,\n description=DESCRIPTION,\n long_description=LONG_DESCRIPTION,\n author='Paul Nation',\n author_email='[email protected]',\n url='https://github.com/nonhermitian/ipyhi',\n license='Apache-2',\n packages=find_packages(exclude=('tests', 'docs')),\n package_data={'ipyhi': ['js/*.js']},\n install_requires=[\n 'ipython',\n 'jupyter',\n 'ipywidgets'\n ],\n classifiers=[\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9'\n ]\n)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class UserInterface(ABC):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@abstractmethod
def show_message(self, msg: str):
"""Show a message"""
<|reserved_special_token_0|>
@abstractmethod
def close(self):
"""Close the updtater UI"""
class CmdUI(UserInterface):
"""Commande line UI"""
def __init__(self) ->None:
super().__init__()
def show_message(self, msg: str):
print(self.state, msg)
def set_state(self, state: int):
"""Set the program progress by a state value"""
self.state = state
def close(self):
pass
class QtUI(UserInterface):
def __init__(self) ->None:
super().__init__()
        self.app = QApplication(sys.argv)  # keep a reference so the QApplication is not garbage-collected
qpix = QPixmap(':/src/inupdater/data/splash.png')
self.splash = SplashScreen(qpix)
self.splash.set_progress_max(10)
self.splash.show()
def show_message(self, msg: str):
self.splash.set_message(msg)
def set_state(self, state: int):
"""Set the program progress by a state value"""
self.splash.set_progress_value(self.state)
self.state = state
time.sleep(1)
def close(self):
self.splash.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserInterface(ABC):
<|reserved_special_token_0|>
def __init__(self) ->None:
self.state = 0
@abstractmethod
def show_message(self, msg: str):
"""Show a message"""
<|reserved_special_token_0|>
@abstractmethod
def close(self):
"""Close the updtater UI"""
class CmdUI(UserInterface):
"""Commande line UI"""
def __init__(self) ->None:
super().__init__()
def show_message(self, msg: str):
print(self.state, msg)
def set_state(self, state: int):
"""Set the program progress by a state value"""
self.state = state
def close(self):
pass
class QtUI(UserInterface):
def __init__(self) ->None:
super().__init__()
        self.app = QApplication(sys.argv)  # keep a reference so the QApplication is not garbage-collected
qpix = QPixmap(':/src/inupdater/data/splash.png')
self.splash = SplashScreen(qpix)
self.splash.set_progress_max(10)
self.splash.show()
def show_message(self, msg: str):
self.splash.set_message(msg)
def set_state(self, state: int):
"""Set the program progress by a state value"""
self.splash.set_progress_value(self.state)
self.state = state
time.sleep(1)
def close(self):
self.splash.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserInterface(ABC):
<|reserved_special_token_0|>
def __init__(self) ->None:
self.state = 0
@abstractmethod
def show_message(self, msg: str):
"""Show a message"""
@abstractmethod
def set_state(self, state: int):
"""Set the program progress by a state value"""
@abstractmethod
def close(self):
"""Close the updtater UI"""
class CmdUI(UserInterface):
"""Commande line UI"""
def __init__(self) ->None:
super().__init__()
def show_message(self, msg: str):
print(self.state, msg)
def set_state(self, state: int):
"""Set the program progress by a state value"""
self.state = state
def close(self):
pass
class QtUI(UserInterface):
def __init__(self) ->None:
super().__init__()
        self.app = QApplication(sys.argv)  # keep a reference so the QApplication is not garbage-collected
qpix = QPixmap(':/src/inupdater/data/splash.png')
self.splash = SplashScreen(qpix)
self.splash.set_progress_max(10)
self.splash.show()
def show_message(self, msg: str):
self.splash.set_message(msg)
def set_state(self, state: int):
"""Set the program progress by a state value"""
self.splash.set_progress_value(self.state)
self.state = state
time.sleep(1)
def close(self):
self.splash.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserInterface(ABC):
"""Interface for GUI element"""
def __init__(self) ->None:
self.state = 0
@abstractmethod
def show_message(self, msg: str):
"""Show a message"""
@abstractmethod
def set_state(self, state: int):
"""Set the program progress by a state value"""
@abstractmethod
def close(self):
"""Close the updtater UI"""
class CmdUI(UserInterface):
"""Commande line UI"""
def __init__(self) ->None:
super().__init__()
def show_message(self, msg: str):
print(self.state, msg)
def set_state(self, state: int):
"""Set the program progress by a state value"""
self.state = state
def close(self):
pass
class QtUI(UserInterface):
def __init__(self) ->None:
super().__init__()
        self.app = QApplication(sys.argv)  # keep a reference so the QApplication is not garbage-collected
qpix = QPixmap(':/src/inupdater/data/splash.png')
self.splash = SplashScreen(qpix)
self.splash.set_progress_max(10)
self.splash.show()
def show_message(self, msg: str):
self.splash.set_message(msg)
def set_state(self, state: int):
"""Set the program progress by a state value"""
self.splash.set_progress_value(self.state)
self.state = state
time.sleep(1)
def close(self):
self.splash.close()
<|reserved_special_token_1|>
import sys
import time
from abc import ABC, abstractmethod
from PySide6.QtGui import QPixmap
from PySide6.QtWidgets import QApplication
import inupdater.resource  # side-effect import: registers the ':/...' Qt resources (presumably pyside6-rcc output)
from inupdater.splash import SplashScreen
class UserInterface(ABC):
"""Interface for GUI element"""
def __init__(self) -> None:
self.state = 0
@abstractmethod
def show_message(self, msg: str):
"""Show a message"""
@abstractmethod
def set_state(self, state: int):
"""Set the program progress by a state value"""
@abstractmethod
def close(self):
"""Close the updtater UI"""
class CmdUI(UserInterface):
"""Commande line UI"""
def __init__(self) -> None:
super().__init__()
def show_message(self, msg: str):
print(self.state, msg)
def set_state(self, state: int):
"""Set the program progress by a state value"""
self.state = state
def close(self):
pass
class QtUI(UserInterface):
def __init__(self) -> None:
super().__init__()
        self.app = QApplication(sys.argv)  # keep a reference so the QApplication is not garbage-collected
qpix = QPixmap(":/src/inupdater/data/splash.png")
self.splash = SplashScreen(qpix)
self.splash.set_progress_max(10)
self.splash.show()
def show_message(self, msg: str):
self.splash.set_message(msg)
def set_state(self, state: int):
"""Set the program progress by a state value"""
self.splash.set_progress_value(self.state)
self.state = state
time.sleep(1)
def close(self):
self.splash.close()
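# --- Hedged usage sketch (a hypothetical updater loop driving either UI):
def _demo(ui: UserInterface):
    for step, msg in enumerate(
        ["checking for updates", "downloading", "installing"], start=1
    ):
        ui.show_message(msg)
        ui.set_state(step)
    ui.close()
# _demo(CmdUI())  # prints "0 checking for updates", "1 downloading", ...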
|
flexible
|
{
"blob_id": "efeb069a7e2aab7262a557236c693752d2973523",
"index": 4169,
"step-1": "<mask token>\n\n\nclass UserInterface(ABC):\n <mask token>\n <mask token>\n\n @abstractmethod\n def show_message(self, msg: str):\n \"\"\"Show a message\"\"\"\n <mask token>\n\n @abstractmethod\n def close(self):\n \"\"\"Close the updtater UI\"\"\"\n\n\nclass CmdUI(UserInterface):\n \"\"\"Commande line UI\"\"\"\n\n def __init__(self) ->None:\n super().__init__()\n\n def show_message(self, msg: str):\n print(self.state, msg)\n\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n self.state = state\n\n def close(self):\n pass\n\n\nclass QtUI(UserInterface):\n\n def __init__(self) ->None:\n super().__init__()\n app = QApplication(sys.argv)\n qpix = QPixmap(':/src/inupdater/data/splash.png')\n self.splash = SplashScreen(qpix)\n self.splash.set_progress_max(10)\n self.splash.show()\n\n def show_message(self, msg: str):\n self.splash.set_message(msg)\n\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n self.splash.set_progress_value(self.state)\n self.state = state\n time.sleep(1)\n\n def close(self):\n self.splash.close()\n",
"step-2": "<mask token>\n\n\nclass UserInterface(ABC):\n <mask token>\n\n def __init__(self) ->None:\n self.state = 0\n\n @abstractmethod\n def show_message(self, msg: str):\n \"\"\"Show a message\"\"\"\n <mask token>\n\n @abstractmethod\n def close(self):\n \"\"\"Close the updtater UI\"\"\"\n\n\nclass CmdUI(UserInterface):\n \"\"\"Commande line UI\"\"\"\n\n def __init__(self) ->None:\n super().__init__()\n\n def show_message(self, msg: str):\n print(self.state, msg)\n\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n self.state = state\n\n def close(self):\n pass\n\n\nclass QtUI(UserInterface):\n\n def __init__(self) ->None:\n super().__init__()\n app = QApplication(sys.argv)\n qpix = QPixmap(':/src/inupdater/data/splash.png')\n self.splash = SplashScreen(qpix)\n self.splash.set_progress_max(10)\n self.splash.show()\n\n def show_message(self, msg: str):\n self.splash.set_message(msg)\n\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n self.splash.set_progress_value(self.state)\n self.state = state\n time.sleep(1)\n\n def close(self):\n self.splash.close()\n",
"step-3": "<mask token>\n\n\nclass UserInterface(ABC):\n <mask token>\n\n def __init__(self) ->None:\n self.state = 0\n\n @abstractmethod\n def show_message(self, msg: str):\n \"\"\"Show a message\"\"\"\n\n @abstractmethod\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n\n @abstractmethod\n def close(self):\n \"\"\"Close the updtater UI\"\"\"\n\n\nclass CmdUI(UserInterface):\n \"\"\"Commande line UI\"\"\"\n\n def __init__(self) ->None:\n super().__init__()\n\n def show_message(self, msg: str):\n print(self.state, msg)\n\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n self.state = state\n\n def close(self):\n pass\n\n\nclass QtUI(UserInterface):\n\n def __init__(self) ->None:\n super().__init__()\n app = QApplication(sys.argv)\n qpix = QPixmap(':/src/inupdater/data/splash.png')\n self.splash = SplashScreen(qpix)\n self.splash.set_progress_max(10)\n self.splash.show()\n\n def show_message(self, msg: str):\n self.splash.set_message(msg)\n\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n self.splash.set_progress_value(self.state)\n self.state = state\n time.sleep(1)\n\n def close(self):\n self.splash.close()\n",
"step-4": "<mask token>\n\n\nclass UserInterface(ABC):\n \"\"\"Interface for GUI element\"\"\"\n\n def __init__(self) ->None:\n self.state = 0\n\n @abstractmethod\n def show_message(self, msg: str):\n \"\"\"Show a message\"\"\"\n\n @abstractmethod\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n\n @abstractmethod\n def close(self):\n \"\"\"Close the updtater UI\"\"\"\n\n\nclass CmdUI(UserInterface):\n \"\"\"Commande line UI\"\"\"\n\n def __init__(self) ->None:\n super().__init__()\n\n def show_message(self, msg: str):\n print(self.state, msg)\n\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n self.state = state\n\n def close(self):\n pass\n\n\nclass QtUI(UserInterface):\n\n def __init__(self) ->None:\n super().__init__()\n app = QApplication(sys.argv)\n qpix = QPixmap(':/src/inupdater/data/splash.png')\n self.splash = SplashScreen(qpix)\n self.splash.set_progress_max(10)\n self.splash.show()\n\n def show_message(self, msg: str):\n self.splash.set_message(msg)\n\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n self.splash.set_progress_value(self.state)\n self.state = state\n time.sleep(1)\n\n def close(self):\n self.splash.close()\n",
"step-5": "import sys\nimport time\nfrom abc import ABC, abstractmethod\n\nfrom PySide6.QtGui import QPixmap\nfrom PySide6.QtWidgets import QApplication\n\nimport inupdater.resource\nfrom inupdater.splash import SplashScreen\n\n\nclass UserInterface(ABC):\n \"\"\"Interface for GUI element\"\"\"\n\n def __init__(self) -> None:\n self.state = 0\n\n @abstractmethod\n def show_message(self, msg: str):\n \"\"\"Show a message\"\"\"\n\n @abstractmethod\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n\n @abstractmethod\n def close(self):\n \"\"\"Close the updtater UI\"\"\"\n\n\nclass CmdUI(UserInterface):\n \"\"\"Commande line UI\"\"\"\n\n def __init__(self) -> None:\n super().__init__()\n\n def show_message(self, msg: str):\n print(self.state, msg)\n\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n self.state = state\n\n def close(self):\n pass\n\n\nclass QtUI(UserInterface):\n def __init__(self) -> None:\n super().__init__()\n app = QApplication(sys.argv)\n qpix = QPixmap(\":/src/inupdater/data/splash.png\")\n self.splash = SplashScreen(qpix)\n self.splash.set_progress_max(10)\n self.splash.show()\n\n def show_message(self, msg: str):\n self.splash.set_message(msg)\n\n def set_state(self, state: int):\n \"\"\"Set the program progress by a state value\"\"\"\n self.splash.set_progress_value(self.state)\n self.state = state\n time.sleep(1)\n\n def close(self):\n self.splash.close()\n",
"step-ids": [
14,
15,
16,
17,
19
]
}
|
[
14,
15,
16,
17,
19
] |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011 Taobao .Inc
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://code.taobao.org/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://code.taobao.org/.
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import *
from django import forms
from django.db.models import Count,Sum,Q
from taocode2.models import *
from taocode2.helper.utils import *
from taocode2.helper.func import wrap
from taocode2.helper import consts
from taocode2.apps.user import activity
from taocode2.apps.repos import svn
from taocode2.settings import *
import time
__author__ = '[email protected]'
def build_prj_nav_menu(request, project, choice=None):
uri = '/p/'+project.name
navmenus = [{'uri': uri + '/src', 'txt':'source'},
{'uri': uri + '/issues', 'txt':'issues'},
{'uri': uri + '/wiki', 'txt':'wiki'},
{'uri': uri + '/info', 'txt':'info'}]
if project.owner == request.user:
navmenus.append({'uri': uri + '/admin', 'txt':'admin'})
if choice is None:
navmenus[0]['choice'] = True
else:
for m in navmenus:
if m['uri'].endswith(choice):
m['choice'] = True
return navmenus
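# --- Hedged illustration (framework-free restatement of the menu built above
# for a hypothetical project 'demo' viewed by its owner, with choice='admin'):
def _demo_nav_menu():
    uri = '/p/demo'
    menus = [{'uri': uri + s, 'txt': t} for s, t in
             (('/src', 'source'), ('/issues', 'issues'),
              ('/wiki', 'wiki'), ('/info', 'info'), ('/admin', 'admin'))]
    menus[-1]['choice'] = True  # the admin entry is marked active
    return menus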
def need_owner(view_func):
def _wrapped_view(request, *args, **kwargs):
rc = request.rc
rc.project = q_get(Project, name=kwargs['name'],
status = consts.PROJECT_ENABLE)
rc.project_name = kwargs['name']
        if rc.project is None:
raise Http404
if rc.project.owner != request.user:
if request.user.supper is False:
return HttpResponseForbidden()
return view_func(request, *args, **kwargs)
return wrap(view_func, _wrapped_view)
def can_access(prj, user):
if prj is None or prj.status != consts.PROJECT_ENABLE:
raise Http404
if prj.is_public:
return None
if user.is_authenticated() is False:
return HttpResponseForbidden()
if prj.owner != user:
pm = q_get(ProjectMember, project = prj, user = user)
if pm is None:
return HttpResponseForbidden()
return None
def can_write(prj, user):
if prj is None or prj.status != consts.PROJECT_ENABLE:
return False
if user.is_authenticated() is False:
return False
if prj.owner != user:
pm = q_get(ProjectMember, project = prj, user = user)
if pm is None:
return False
return True
@need_owner
@as_json
@login_required
def do_invite(request, name):
if request.method != 'POST':
return False
uname = request.POST.get('u', '').strip()
if len(uname) <= 0:
return False
user = q_get(User, Q(name=uname)|Q(email=uname))
if user is None or user == request.user:
return False
rc = request.rc
pm = q_get(ProjectMember,
project=rc.project, user=user)
if pm is not None:
if pm.member_type != consts.PM_ACCEPT_INV:
pm.member_type = consts.PM_SEND_INV
pm.save()
return True
pm = ProjectMember()
pm.project = rc.project
pm.user = user
pm.member_type = consts.PM_SEND_INV
pm.save()
return True
@login_required
@need_owner
def project_admin(request, name):
rc = request.rc
rc.pagename = name + ' admin'
rc.navmenus = build_prj_nav_menu(request, rc.project, 'admin')
res = []
vls = q_gets(Issue, project = rc.project,
status__in = (consts.ISSUE_OPEN,
consts.ISSUE_CLOSED)).values('project').annotate(pc=Count('project'))
res.append(['Issue Count',
len(vls) > 0 and vls[0]['pc'] or 0])
vls = q_gets(ProjectAttachment, project = rc.project,
status = consts.FILE_ENABLE).values('project').annotate(pc=Count('project'))
    res.append(['Attachments Count',
len(vls) > 0 and vls[0]['pc'] or 0])
vls = q_gets(ProjectAttachment,
project = rc.project,
status = consts.FILE_ENABLE).values('project').annotate(ps=Sum('size'))
si = (len(vls) > 0 and vls[0]['ps'] or 0) / (1024*1024.0)
    res.append(['Attachments Total Size', '%.4s MB' % si])
r,out, err = exec_cmd(['du','-sbh', os.path.join(settings.REPOS_ROOT, name)])
res.append(['Repository Usage', r != 0 and '0.0 MB' or out.split()[0]])
rc.res = res
rc.licenses = map(lambda x:x[0], consts.LICENSES)
if rc.project.status != consts.PROJECT_ENABLE:
raise Http404
return send_response(request, 'project/admin.html')
@login_required
@need_owner
def project_resources(request, name):
rc = request.rc
rc.pagename = 'Project resources usages'
uri = '/p/'+name+'/admin'
rc.navmenus = [{'uri': uri, 'txt':'basic'},
                   {'uri': uri + 'resources',
'txt':'resources', 'choice':True}]
if rc.project.status != consts.PROJECT_ENABLE:
raise Http404
return send_response(request, 'project/resources.html')
@as_json
def get_members(request, name):
project = q_get(Project, name=name)
if project is None:
return False
resp = can_access(project, request.user)
if resp is not None:
return False
members = q_gets(ProjectMember, project=project)
return (True, [m.json() for m in members])
def do_invite_op(request, name, op):
if request.method != 'POST':
return False
project = q_get(Project, Q(name=name))
if project is None:
return False
pm = q_get(ProjectMember, project=project, user=request.user)
if pm is None:
return False
pm.member_type = op
pm.save()
if op == consts.PM_ACCEPT_INV:
activity.join_member(project, request.user, request.user)
return True
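# --- Hedged summary (semantics assumed from the handlers around it): the
# membership state machine that do_invite/do_accept/do_reject drive.
def _invite_transitions():
    return {
        'owner invites': consts.PM_SEND_INV,
        'member accepts': consts.PM_ACCEPT_INV,  # also logs activity.join_member
        'member rejects': consts.PM_REJECT_INV,
    }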
@as_json
@login_required
def do_accept(request, name):
return do_invite_op(request, name,
consts.PM_ACCEPT_INV)
@as_json
@login_required
def do_reject(request, name):
return do_invite_op(request, name,
consts.PM_REJECT_INV)
@as_json
@login_required
def do_exit(request, name):
project = q_get(Project, name = name)
if project is None:
return False
ProjectMember.objects.filter(project = project,
user = request.user).delete()
activity.leave_member(project, request.user, request.user)
return True
@login_required
@need_owner
@as_json
def del_member(request, name):
if request.method != 'POST':
return False
uname = request.POST.get('u', '').strip()
if len(uname) <= 0:
return False
rc = request.rc
ProjectMember.objects.filter(project = rc.project,
user = User.objects.filter(name=uname)).delete()
return True
@login_required
@need_owner
@as_json
def del_prj(request, name):
if request.method != 'POST':
return False
del_name = name + '__DELETED__%s'%time.time()
project = request.rc.project
old_name = project.name
project.name = del_name
project.status = consts.PROJECT_MARK_DELETED
project.save()
svn.del_repos(old_name, del_name)
return (True, reverse('apps.user.views.view_user', args=[]))
@login_required
@need_owner
@as_json
def edit_prj(request, name):
if request.method != 'POST':
return False
project = request.rc.project
title = request.POST.get('t','').strip()
if len(title) <= 0:
return False
license = request.POST.get('l','').strip()
is_public = request.POST.get('pub','0').strip()
project.title = title
project.license = license
project.is_public = bool(int(is_public))
project.save()
return True
|
normal
|
{
"blob_id": "bacd0c729193f064b21ab8e01e98dfc276094458",
"index": 7853,
"step-1": "<mask token>\n\n\ndef need_owner(view_func):\n\n def _wrapped_view(request, *args, **kwargs):\n rc = request.rc\n rc.project = q_get(Project, name=kwargs['name'], status=consts.\n PROJECT_ENABLE)\n rc.project_name = kwargs['name']\n if rc.project == None:\n raise Http404\n if rc.project.owner != request.user:\n if request.user.supper is False:\n return HttpResponseForbidden()\n return view_func(request, *args, **kwargs)\n return wrap(view_func, _wrapped_view)\n\n\ndef can_access(prj, user):\n if prj is None or prj.status != consts.PROJECT_ENABLE:\n raise Http404\n if prj.is_public:\n return None\n if user.is_authenticated() is False:\n return HttpResponseForbidden()\n if prj.owner != user:\n pm = q_get(ProjectMember, project=prj, user=user)\n if pm is None:\n return HttpResponseForbidden()\n return None\n\n\n<mask token>\n\n\n@login_required\n@need_owner\ndef project_admin(request, name):\n rc = request.rc\n rc.pagename = name + ' admin'\n uri = request.META['PATH_INFO']\n rc.navmenus = build_prj_nav_menu(request, rc.project, 'admin')\n res = []\n vls = q_gets(Issue, project=rc.project, status__in=(consts.ISSUE_OPEN,\n consts.ISSUE_CLOSED)).values('project').annotate(pc=Count('project'))\n res.append(['Issue Count', len(vls) > 0 and vls[0]['pc'] or 0])\n vls = q_gets(ProjectAttachment, project=rc.project, status=consts.\n FILE_ENABLE).values('project').annotate(pc=Count('project'))\n res.append(['Attachemts Count', len(vls) > 0 and vls[0]['pc'] or 0])\n vls = q_gets(ProjectAttachment, project=rc.project, status=consts.\n FILE_ENABLE).values('project').annotate(ps=Sum('size'))\n si = (len(vls) > 0 and vls[0]['ps'] or 0) / (1024 * 1024.0)\n res.append(['Attachemts Total Size', '%.4s MB' % si])\n r, out, err = exec_cmd(['du', '-sbh', os.path.join(settings.REPOS_ROOT,\n name)])\n res.append(['Repository Usage', r != 0 and '0.0 MB' or out.split()[0]])\n rc.res = res\n rc.licenses = map(lambda x: x[0], consts.LICENSES)\n if rc.project.status != consts.PROJECT_ENABLE:\n raise Http404\n return send_response(request, 'project/admin.html')\n\n\n@login_required\n@need_owner\ndef project_resources(request, name):\n rc = request.rc\n rc.pagename = 'Project resources usages'\n uri = '/p/' + name + '/admin'\n rc.navmenus = [{'uri': uri, 'txt': 'basic'}, {'uri': uri + 'resouces',\n 'txt': 'resources', 'choice': True}]\n if rc.project.status != consts.PROJECT_ENABLE:\n raise Http404\n return send_response(request, 'project/resources.html')\n\n\n<mask token>\n\n\n@as_json\n@login_required\ndef do_accept(request, name):\n return do_invite_op(request, name, consts.PM_ACCEPT_INV)\n\n\n<mask token>\n\n\n@as_json\n@login_required\ndef do_exit(request, name):\n project = q_get(Project, name=name)\n if project is None:\n return False\n ProjectMember.objects.filter(project=project, user=request.user).delete()\n activity.leave_member(project, request.user, request.user)\n return True\n\n\n<mask token>\n\n\n@login_required\n@need_owner\n@as_json\ndef del_prj(request, name):\n if request.method != 'POST':\n return False\n del_name = name + '__DELETED__%s' % time.time()\n project = request.rc.project\n old_name = project.name\n project.name = del_name\n project.status = consts.PROJECT_MARK_DELETED\n project.save()\n svn.del_repos(old_name, del_name)\n return True, reverse('apps.user.views.view_user', args=[])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef build_prj_nav_menu(request, project, choice=None):\n uri = '/p/' + project.name\n navmenus = [{'uri': uri + '/src', 'txt': 'source'}, {'uri': uri +\n '/issues', 'txt': 'issues'}, {'uri': uri + '/wiki', 'txt': 'wiki'},\n {'uri': uri + '/info', 'txt': 'info'}]\n if project.owner == request.user:\n navmenus.append({'uri': uri + '/admin', 'txt': 'admin'})\n if choice is None:\n navmenus[0]['choice'] = True\n else:\n for m in navmenus:\n if m['uri'].endswith(choice):\n m['choice'] = True\n return navmenus\n\n\ndef need_owner(view_func):\n\n def _wrapped_view(request, *args, **kwargs):\n rc = request.rc\n rc.project = q_get(Project, name=kwargs['name'], status=consts.\n PROJECT_ENABLE)\n rc.project_name = kwargs['name']\n if rc.project == None:\n raise Http404\n if rc.project.owner != request.user:\n if request.user.supper is False:\n return HttpResponseForbidden()\n return view_func(request, *args, **kwargs)\n return wrap(view_func, _wrapped_view)\n\n\ndef can_access(prj, user):\n if prj is None or prj.status != consts.PROJECT_ENABLE:\n raise Http404\n if prj.is_public:\n return None\n if user.is_authenticated() is False:\n return HttpResponseForbidden()\n if prj.owner != user:\n pm = q_get(ProjectMember, project=prj, user=user)\n if pm is None:\n return HttpResponseForbidden()\n return None\n\n\ndef can_write(prj, user):\n if prj is None or prj.status != consts.PROJECT_ENABLE:\n return False\n if user.is_authenticated() is False:\n return False\n if prj.owner != user:\n pm = q_get(ProjectMember, project=prj, user=user)\n if pm is None:\n return False\n return True\n\n\n@need_owner\n@as_json\n@login_required\ndef do_invite(request, name):\n if request.method != 'POST':\n return False\n uname = request.POST.get('u', '').strip()\n if len(uname) <= 0:\n return False\n user = q_get(User, Q(name=uname) | Q(email=uname))\n if user is None or user == request.user:\n return False\n rc = request.rc\n pm = q_get(ProjectMember, project=rc.project, user=user)\n if pm is not None:\n if pm.member_type != consts.PM_ACCEPT_INV:\n pm.member_type = consts.PM_SEND_INV\n pm.save()\n return True\n pm = ProjectMember()\n pm.project = rc.project\n pm.user = user\n pm.member_type = consts.PM_SEND_INV\n pm.save()\n return True\n\n\n@login_required\n@need_owner\ndef project_admin(request, name):\n rc = request.rc\n rc.pagename = name + ' admin'\n uri = request.META['PATH_INFO']\n rc.navmenus = build_prj_nav_menu(request, rc.project, 'admin')\n res = []\n vls = q_gets(Issue, project=rc.project, status__in=(consts.ISSUE_OPEN,\n consts.ISSUE_CLOSED)).values('project').annotate(pc=Count('project'))\n res.append(['Issue Count', len(vls) > 0 and vls[0]['pc'] or 0])\n vls = q_gets(ProjectAttachment, project=rc.project, status=consts.\n FILE_ENABLE).values('project').annotate(pc=Count('project'))\n res.append(['Attachemts Count', len(vls) > 0 and vls[0]['pc'] or 0])\n vls = q_gets(ProjectAttachment, project=rc.project, status=consts.\n FILE_ENABLE).values('project').annotate(ps=Sum('size'))\n si = (len(vls) > 0 and vls[0]['ps'] or 0) / (1024 * 1024.0)\n res.append(['Attachemts Total Size', '%.4s MB' % si])\n r, out, err = exec_cmd(['du', '-sbh', os.path.join(settings.REPOS_ROOT,\n name)])\n res.append(['Repository Usage', r != 0 and '0.0 MB' or out.split()[0]])\n rc.res = res\n rc.licenses = map(lambda x: x[0], consts.LICENSES)\n if rc.project.status != consts.PROJECT_ENABLE:\n raise Http404\n return send_response(request, 'project/admin.html')\n\n\n@login_required\n@need_owner\ndef 
project_resources(request, name):\n rc = request.rc\n rc.pagename = 'Project resources usages'\n uri = '/p/' + name + '/admin'\n rc.navmenus = [{'uri': uri, 'txt': 'basic'}, {'uri': uri + 'resouces',\n 'txt': 'resources', 'choice': True}]\n if rc.project.status != consts.PROJECT_ENABLE:\n raise Http404\n return send_response(request, 'project/resources.html')\n\n\n<mask token>\n\n\ndef do_invite_op(request, name, op):\n if request.method != 'POST':\n return False\n project = q_get(Project, Q(name=name))\n if project is None:\n return False\n pm = q_get(ProjectMember, project=project, user=request.user)\n if pm is None:\n return False\n pm.member_type = op\n pm.save()\n if op == consts.PM_ACCEPT_INV:\n activity.join_member(project, request.user, request.user)\n return True\n\n\n@as_json\n@login_required\ndef do_accept(request, name):\n return do_invite_op(request, name, consts.PM_ACCEPT_INV)\n\n\n@as_json\n@login_required\ndef do_reject(request, name):\n return do_invite_op(request, name, consts.PM_REJECT_INV)\n\n\n@as_json\n@login_required\ndef do_exit(request, name):\n project = q_get(Project, name=name)\n if project is None:\n return False\n ProjectMember.objects.filter(project=project, user=request.user).delete()\n activity.leave_member(project, request.user, request.user)\n return True\n\n\n@login_required\n@need_owner\n@as_json\ndef del_member(request, name):\n if request.method != 'POST':\n return False\n uname = request.POST.get('u', '').strip()\n if len(uname) <= 0:\n return False\n rc = request.rc\n ProjectMember.objects.filter(project=rc.project, user=User.objects.\n filter(name=uname)).delete()\n return True\n\n\n@login_required\n@need_owner\n@as_json\ndef del_prj(request, name):\n if request.method != 'POST':\n return False\n del_name = name + '__DELETED__%s' % time.time()\n project = request.rc.project\n old_name = project.name\n project.name = del_name\n project.status = consts.PROJECT_MARK_DELETED\n project.save()\n svn.del_repos(old_name, del_name)\n return True, reverse('apps.user.views.view_user', args=[])\n\n\n@login_required\n@need_owner\n@as_json\ndef edit_prj(request, name):\n if request.method != 'POST':\n return False\n project = request.rc.project\n title = request.POST.get('t', '').strip()\n if len(title) <= 0:\n return False\n license = request.POST.get('l', '').strip()\n is_public = request.POST.get('pub', '0').strip()\n project.title = title\n project.license = license\n project.is_public = bool(int(is_public))\n project.save()\n return True\n",
"step-3": "<mask token>\n__author__ = '[email protected]'\n\n\ndef build_prj_nav_menu(request, project, choice=None):\n uri = '/p/' + project.name\n navmenus = [{'uri': uri + '/src', 'txt': 'source'}, {'uri': uri +\n '/issues', 'txt': 'issues'}, {'uri': uri + '/wiki', 'txt': 'wiki'},\n {'uri': uri + '/info', 'txt': 'info'}]\n if project.owner == request.user:\n navmenus.append({'uri': uri + '/admin', 'txt': 'admin'})\n if choice is None:\n navmenus[0]['choice'] = True\n else:\n for m in navmenus:\n if m['uri'].endswith(choice):\n m['choice'] = True\n return navmenus\n\n\ndef need_owner(view_func):\n\n def _wrapped_view(request, *args, **kwargs):\n rc = request.rc\n rc.project = q_get(Project, name=kwargs['name'], status=consts.\n PROJECT_ENABLE)\n rc.project_name = kwargs['name']\n if rc.project == None:\n raise Http404\n if rc.project.owner != request.user:\n if request.user.supper is False:\n return HttpResponseForbidden()\n return view_func(request, *args, **kwargs)\n return wrap(view_func, _wrapped_view)\n\n\ndef can_access(prj, user):\n if prj is None or prj.status != consts.PROJECT_ENABLE:\n raise Http404\n if prj.is_public:\n return None\n if user.is_authenticated() is False:\n return HttpResponseForbidden()\n if prj.owner != user:\n pm = q_get(ProjectMember, project=prj, user=user)\n if pm is None:\n return HttpResponseForbidden()\n return None\n\n\ndef can_write(prj, user):\n if prj is None or prj.status != consts.PROJECT_ENABLE:\n return False\n if user.is_authenticated() is False:\n return False\n if prj.owner != user:\n pm = q_get(ProjectMember, project=prj, user=user)\n if pm is None:\n return False\n return True\n\n\n@need_owner\n@as_json\n@login_required\ndef do_invite(request, name):\n if request.method != 'POST':\n return False\n uname = request.POST.get('u', '').strip()\n if len(uname) <= 0:\n return False\n user = q_get(User, Q(name=uname) | Q(email=uname))\n if user is None or user == request.user:\n return False\n rc = request.rc\n pm = q_get(ProjectMember, project=rc.project, user=user)\n if pm is not None:\n if pm.member_type != consts.PM_ACCEPT_INV:\n pm.member_type = consts.PM_SEND_INV\n pm.save()\n return True\n pm = ProjectMember()\n pm.project = rc.project\n pm.user = user\n pm.member_type = consts.PM_SEND_INV\n pm.save()\n return True\n\n\n@login_required\n@need_owner\ndef project_admin(request, name):\n rc = request.rc\n rc.pagename = name + ' admin'\n uri = request.META['PATH_INFO']\n rc.navmenus = build_prj_nav_menu(request, rc.project, 'admin')\n res = []\n vls = q_gets(Issue, project=rc.project, status__in=(consts.ISSUE_OPEN,\n consts.ISSUE_CLOSED)).values('project').annotate(pc=Count('project'))\n res.append(['Issue Count', len(vls) > 0 and vls[0]['pc'] or 0])\n vls = q_gets(ProjectAttachment, project=rc.project, status=consts.\n FILE_ENABLE).values('project').annotate(pc=Count('project'))\n res.append(['Attachemts Count', len(vls) > 0 and vls[0]['pc'] or 0])\n vls = q_gets(ProjectAttachment, project=rc.project, status=consts.\n FILE_ENABLE).values('project').annotate(ps=Sum('size'))\n si = (len(vls) > 0 and vls[0]['ps'] or 0) / (1024 * 1024.0)\n res.append(['Attachemts Total Size', '%.4s MB' % si])\n r, out, err = exec_cmd(['du', '-sbh', os.path.join(settings.REPOS_ROOT,\n name)])\n res.append(['Repository Usage', r != 0 and '0.0 MB' or out.split()[0]])\n rc.res = res\n rc.licenses = map(lambda x: x[0], consts.LICENSES)\n if rc.project.status != consts.PROJECT_ENABLE:\n raise Http404\n return send_response(request, 
'project/admin.html')\n\n\n@login_required\n@need_owner\ndef project_resources(request, name):\n rc = request.rc\n rc.pagename = 'Project resources usages'\n uri = '/p/' + name + '/admin'\n rc.navmenus = [{'uri': uri, 'txt': 'basic'}, {'uri': uri + 'resouces',\n 'txt': 'resources', 'choice': True}]\n if rc.project.status != consts.PROJECT_ENABLE:\n raise Http404\n return send_response(request, 'project/resources.html')\n\n\n@as_json\ndef get_members(request, name):\n project = q_get(Project, name=name)\n if project is None:\n return False\n resp = can_access(project, request.user)\n if resp is not None:\n return False\n members = q_gets(ProjectMember, project=project)\n return True, [m.json() for m in members]\n\n\ndef do_invite_op(request, name, op):\n if request.method != 'POST':\n return False\n project = q_get(Project, Q(name=name))\n if project is None:\n return False\n pm = q_get(ProjectMember, project=project, user=request.user)\n if pm is None:\n return False\n pm.member_type = op\n pm.save()\n if op == consts.PM_ACCEPT_INV:\n activity.join_member(project, request.user, request.user)\n return True\n\n\n@as_json\n@login_required\ndef do_accept(request, name):\n return do_invite_op(request, name, consts.PM_ACCEPT_INV)\n\n\n@as_json\n@login_required\ndef do_reject(request, name):\n return do_invite_op(request, name, consts.PM_REJECT_INV)\n\n\n@as_json\n@login_required\ndef do_exit(request, name):\n project = q_get(Project, name=name)\n if project is None:\n return False\n ProjectMember.objects.filter(project=project, user=request.user).delete()\n activity.leave_member(project, request.user, request.user)\n return True\n\n\n@login_required\n@need_owner\n@as_json\ndef del_member(request, name):\n if request.method != 'POST':\n return False\n uname = request.POST.get('u', '').strip()\n if len(uname) <= 0:\n return False\n rc = request.rc\n ProjectMember.objects.filter(project=rc.project, user=User.objects.\n filter(name=uname)).delete()\n return True\n\n\n@login_required\n@need_owner\n@as_json\ndef del_prj(request, name):\n if request.method != 'POST':\n return False\n del_name = name + '__DELETED__%s' % time.time()\n project = request.rc.project\n old_name = project.name\n project.name = del_name\n project.status = consts.PROJECT_MARK_DELETED\n project.save()\n svn.del_repos(old_name, del_name)\n return True, reverse('apps.user.views.view_user', args=[])\n\n\n@login_required\n@need_owner\n@as_json\ndef edit_prj(request, name):\n if request.method != 'POST':\n return False\n project = request.rc.project\n title = request.POST.get('t', '').strip()\n if len(title) <= 0:\n return False\n license = request.POST.get('l', '').strip()\n is_public = request.POST.get('pub', '0').strip()\n project.title = title\n project.license = license\n project.is_public = bool(int(is_public))\n project.save()\n return True\n",
"step-4": "from django.contrib.auth.decorators import login_required\nfrom django.core.urlresolvers import reverse\nfrom django.http import *\nfrom django import forms\nfrom django.db.models import Count, Sum, Q\nfrom taocode2.models import *\nfrom taocode2.helper.utils import *\nfrom taocode2.helper.func import wrap\nfrom taocode2.helper import consts\nfrom taocode2.apps.user import activity\nfrom taocode2.apps.repos import svn\nfrom taocode2.settings import *\nimport time\n__author__ = '[email protected]'\n\n\ndef build_prj_nav_menu(request, project, choice=None):\n uri = '/p/' + project.name\n navmenus = [{'uri': uri + '/src', 'txt': 'source'}, {'uri': uri +\n '/issues', 'txt': 'issues'}, {'uri': uri + '/wiki', 'txt': 'wiki'},\n {'uri': uri + '/info', 'txt': 'info'}]\n if project.owner == request.user:\n navmenus.append({'uri': uri + '/admin', 'txt': 'admin'})\n if choice is None:\n navmenus[0]['choice'] = True\n else:\n for m in navmenus:\n if m['uri'].endswith(choice):\n m['choice'] = True\n return navmenus\n\n\ndef need_owner(view_func):\n\n def _wrapped_view(request, *args, **kwargs):\n rc = request.rc\n rc.project = q_get(Project, name=kwargs['name'], status=consts.\n PROJECT_ENABLE)\n rc.project_name = kwargs['name']\n if rc.project == None:\n raise Http404\n if rc.project.owner != request.user:\n if request.user.supper is False:\n return HttpResponseForbidden()\n return view_func(request, *args, **kwargs)\n return wrap(view_func, _wrapped_view)\n\n\ndef can_access(prj, user):\n if prj is None or prj.status != consts.PROJECT_ENABLE:\n raise Http404\n if prj.is_public:\n return None\n if user.is_authenticated() is False:\n return HttpResponseForbidden()\n if prj.owner != user:\n pm = q_get(ProjectMember, project=prj, user=user)\n if pm is None:\n return HttpResponseForbidden()\n return None\n\n\ndef can_write(prj, user):\n if prj is None or prj.status != consts.PROJECT_ENABLE:\n return False\n if user.is_authenticated() is False:\n return False\n if prj.owner != user:\n pm = q_get(ProjectMember, project=prj, user=user)\n if pm is None:\n return False\n return True\n\n\n@need_owner\n@as_json\n@login_required\ndef do_invite(request, name):\n if request.method != 'POST':\n return False\n uname = request.POST.get('u', '').strip()\n if len(uname) <= 0:\n return False\n user = q_get(User, Q(name=uname) | Q(email=uname))\n if user is None or user == request.user:\n return False\n rc = request.rc\n pm = q_get(ProjectMember, project=rc.project, user=user)\n if pm is not None:\n if pm.member_type != consts.PM_ACCEPT_INV:\n pm.member_type = consts.PM_SEND_INV\n pm.save()\n return True\n pm = ProjectMember()\n pm.project = rc.project\n pm.user = user\n pm.member_type = consts.PM_SEND_INV\n pm.save()\n return True\n\n\n@login_required\n@need_owner\ndef project_admin(request, name):\n rc = request.rc\n rc.pagename = name + ' admin'\n uri = request.META['PATH_INFO']\n rc.navmenus = build_prj_nav_menu(request, rc.project, 'admin')\n res = []\n vls = q_gets(Issue, project=rc.project, status__in=(consts.ISSUE_OPEN,\n consts.ISSUE_CLOSED)).values('project').annotate(pc=Count('project'))\n res.append(['Issue Count', len(vls) > 0 and vls[0]['pc'] or 0])\n vls = q_gets(ProjectAttachment, project=rc.project, status=consts.\n FILE_ENABLE).values('project').annotate(pc=Count('project'))\n res.append(['Attachemts Count', len(vls) > 0 and vls[0]['pc'] or 0])\n vls = q_gets(ProjectAttachment, project=rc.project, status=consts.\n FILE_ENABLE).values('project').annotate(ps=Sum('size'))\n si = (len(vls) > 0 and 
vls[0]['ps'] or 0) / (1024 * 1024.0)\n res.append(['Attachemts Total Size', '%.4s MB' % si])\n r, out, err = exec_cmd(['du', '-sbh', os.path.join(settings.REPOS_ROOT,\n name)])\n res.append(['Repository Usage', r != 0 and '0.0 MB' or out.split()[0]])\n rc.res = res\n rc.licenses = map(lambda x: x[0], consts.LICENSES)\n if rc.project.status != consts.PROJECT_ENABLE:\n raise Http404\n return send_response(request, 'project/admin.html')\n\n\n@login_required\n@need_owner\ndef project_resources(request, name):\n rc = request.rc\n rc.pagename = 'Project resources usages'\n uri = '/p/' + name + '/admin'\n rc.navmenus = [{'uri': uri, 'txt': 'basic'}, {'uri': uri + 'resouces',\n 'txt': 'resources', 'choice': True}]\n if rc.project.status != consts.PROJECT_ENABLE:\n raise Http404\n return send_response(request, 'project/resources.html')\n\n\n@as_json\ndef get_members(request, name):\n project = q_get(Project, name=name)\n if project is None:\n return False\n resp = can_access(project, request.user)\n if resp is not None:\n return False\n members = q_gets(ProjectMember, project=project)\n return True, [m.json() for m in members]\n\n\ndef do_invite_op(request, name, op):\n if request.method != 'POST':\n return False\n project = q_get(Project, Q(name=name))\n if project is None:\n return False\n pm = q_get(ProjectMember, project=project, user=request.user)\n if pm is None:\n return False\n pm.member_type = op\n pm.save()\n if op == consts.PM_ACCEPT_INV:\n activity.join_member(project, request.user, request.user)\n return True\n\n\n@as_json\n@login_required\ndef do_accept(request, name):\n return do_invite_op(request, name, consts.PM_ACCEPT_INV)\n\n\n@as_json\n@login_required\ndef do_reject(request, name):\n return do_invite_op(request, name, consts.PM_REJECT_INV)\n\n\n@as_json\n@login_required\ndef do_exit(request, name):\n project = q_get(Project, name=name)\n if project is None:\n return False\n ProjectMember.objects.filter(project=project, user=request.user).delete()\n activity.leave_member(project, request.user, request.user)\n return True\n\n\n@login_required\n@need_owner\n@as_json\ndef del_member(request, name):\n if request.method != 'POST':\n return False\n uname = request.POST.get('u', '').strip()\n if len(uname) <= 0:\n return False\n rc = request.rc\n ProjectMember.objects.filter(project=rc.project, user=User.objects.\n filter(name=uname)).delete()\n return True\n\n\n@login_required\n@need_owner\n@as_json\ndef del_prj(request, name):\n if request.method != 'POST':\n return False\n del_name = name + '__DELETED__%s' % time.time()\n project = request.rc.project\n old_name = project.name\n project.name = del_name\n project.status = consts.PROJECT_MARK_DELETED\n project.save()\n svn.del_repos(old_name, del_name)\n return True, reverse('apps.user.views.view_user', args=[])\n\n\n@login_required\n@need_owner\n@as_json\ndef edit_prj(request, name):\n if request.method != 'POST':\n return False\n project = request.rc.project\n title = request.POST.get('t', '').strip()\n if len(title) <= 0:\n return False\n license = request.POST.get('l', '').strip()\n is_public = request.POST.get('pub', '0').strip()\n project.title = title\n project.license = license\n project.is_public = bool(int(is_public))\n project.save()\n return True\n",
"step-5": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) 2011 Taobao .Inc\n# All rights reserved.\n#\n# This software is licensed as described in the file COPYING, which\n# you should have received as part of this distribution. The terms\n# are also available at http://code.taobao.org/license.html.\n#\n# This software consists of voluntary contributions made by many\n# individuals. For the exact contribution history, see the revision\n# history and logs, available at http://code.taobao.org/.\n\n\nfrom django.contrib.auth.decorators import login_required\nfrom django.core.urlresolvers import reverse\nfrom django.http import *\nfrom django import forms\nfrom django.db.models import Count,Sum,Q\n\nfrom taocode2.models import *\nfrom taocode2.helper.utils import *\nfrom taocode2.helper.func import wrap\nfrom taocode2.helper import consts\nfrom taocode2.apps.user import activity\nfrom taocode2.apps.repos import svn\n\nfrom taocode2.settings import *\nimport time\n\n\n__author__ = '[email protected]'\n\n\ndef build_prj_nav_menu(request, project, choice = None):\n uri = '/p/'+project.name\n\n navmenus = [{'uri': uri + '/src', 'txt':'source'},\n {'uri': uri + '/issues', 'txt':'issues'},\n {'uri': uri + '/wiki', 'txt':'wiki'},\n {'uri': uri + '/info', 'txt':'info'}]\n\n if project.owner == request.user:\n navmenus.append({'uri': uri + '/admin', 'txt':'admin'})\n\n if choice is None:\n navmenus[0]['choice'] = True\n else:\n for m in navmenus:\n if m['uri'].endswith(choice):\n m['choice'] = True\n return navmenus\n\ndef need_owner(view_func):\n def _wrapped_view(request, *args, **kwargs):\n rc = request.rc\n rc.project = q_get(Project, name=kwargs['name'],\n status = consts.PROJECT_ENABLE)\n rc.project_name = kwargs['name']\n\n if rc.project == None:\n raise Http404\n\n if rc.project.owner != request.user:\n if request.user.supper is False:\n return HttpResponseForbidden()\n return view_func(request, *args, **kwargs)\n return wrap(view_func, _wrapped_view)\n\ndef can_access(prj, user):\n if prj is None or prj.status != consts.PROJECT_ENABLE:\n raise Http404\n\n if prj.is_public:\n return None\n\n if user.is_authenticated() is False:\n return HttpResponseForbidden()\n\n if prj.owner != user:\n pm = q_get(ProjectMember, project = prj, user = user)\n if pm is None:\n return HttpResponseForbidden()\n return None\n\n\ndef can_write(prj, user):\n if prj is None or prj.status != consts.PROJECT_ENABLE:\n return False\n\n if user.is_authenticated() is False:\n return False\n\n if prj.owner != user:\n pm = q_get(ProjectMember, project = prj, user = user)\n if pm is None:\n return False\n return True\n \n\n@need_owner\n@as_json\n@login_required\ndef do_invite(request, name):\n if request.method != 'POST':\n return False\n uname = request.POST.get('u', '').strip()\n if len(uname) <= 0:\n return False\n \n user = q_get(User, Q(name=uname)|Q(email=uname))\n if user is None or user == request.user:\n return False\n\n rc = request.rc\n\n pm = q_get(ProjectMember,\n project=rc.project, user=user)\n \n if pm is not None:\n if pm.member_type != consts.PM_ACCEPT_INV:\n pm.member_type = consts.PM_SEND_INV\n pm.save()\n return True\n \n pm = ProjectMember()\n pm.project = rc.project\n pm.user = user\n pm.member_type = consts.PM_SEND_INV\n pm.save()\n\n return True\n\n@login_required\n@need_owner\ndef project_admin(request, name):\n rc = request.rc\n rc.pagename = name + ' admin'\n uri = request.META['PATH_INFO']\n \n #rc.navmenus = [{'uri': uri, 'txt':'basic', 'choice':True},\n # {'uri': uri + 'resources', 
'txt':'resources'}]\n\n rc.navmenus = build_prj_nav_menu(request, rc.project, 'admin')\n\n res = []\n vls = q_gets(Issue, project = rc.project,\n status__in = (consts.ISSUE_OPEN, \n consts.ISSUE_CLOSED)).values('project').annotate(pc=Count('project'))\n res.append(['Issue Count', \n len(vls) > 0 and vls[0]['pc'] or 0])\n vls = q_gets(ProjectAttachment, project = rc.project,\n status = consts.FILE_ENABLE).values('project').annotate(pc=Count('project'))\n\n res.append(['Attachemts Count',\n len(vls) > 0 and vls[0]['pc'] or 0])\n \n vls = q_gets(ProjectAttachment,\n project = rc.project,\n status = consts.FILE_ENABLE).values('project').annotate(ps=Sum('size'))\n \n si = (len(vls) > 0 and vls[0]['ps'] or 0) / (1024*1024.0)\n \n res.append(['Attachemts Total Size','%.4s MB'%si])\n\n r,out, err = exec_cmd(['du','-sbh', os.path.join(settings.REPOS_ROOT, name)])\n res.append(['Repository Usage', r != 0 and '0.0 MB' or out.split()[0]])\n\n rc.res = res\n\n rc.licenses = map(lambda x:x[0], consts.LICENSES)\n if rc.project.status != consts.PROJECT_ENABLE:\n raise Http404\n\n return send_response(request, 'project/admin.html')\n\n\n@login_required\n@need_owner\ndef project_resources(request, name):\n rc = request.rc\n rc.pagename = 'Project resources usages'\n uri = '/p/'+name+'/admin'\n \n rc.navmenus = [{'uri': uri, 'txt':'basic'},\n {'uri': uri + 'resouces',\n 'txt':'resources', 'choice':True}]\n\n if rc.project.status != consts.PROJECT_ENABLE:\n raise Http404\n\n return send_response(request, 'project/resources.html')\n\n\n@as_json\ndef get_members(request, name):\n project = q_get(Project, name=name)\n if project is None:\n return False\n \n resp = can_access(project, request.user)\n if resp is not None:\n return False\n\n members = q_gets(ProjectMember, project=project) \n return (True, [m.json() for m in members])\n\ndef do_invite_op(request, name, op):\n if request.method != 'POST':\n return False\n\n project = q_get(Project, Q(name=name))\n\n if project is None:\n return False\n pm = q_get(ProjectMember, project=project, user=request.user)\n\n if pm is None:\n return False\n\n pm.member_type = op\n pm.save()\n\n if op == consts.PM_ACCEPT_INV:\n activity.join_member(project, request.user, request.user)\n\n return True\n\n@as_json\n@login_required\ndef do_accept(request, name):\n return do_invite_op(request, name, \n consts.PM_ACCEPT_INV)\n\n@as_json\n@login_required\ndef do_reject(request, name):\n return do_invite_op(request, name,\n consts.PM_REJECT_INV)\n\n@as_json\n@login_required\ndef do_exit(request, name):\n \n project = q_get(Project, name = name)\n \n if project is None:\n return False\n \n ProjectMember.objects.filter(project = project,\n user = request.user).delete()\n\n activity.leave_member(project, request.user, request.user)\n return True\n\n@login_required\n@need_owner\n@as_json\ndef del_member(request, name):\n if request.method != 'POST':\n return False\n \n uname = request.POST.get('u', '').strip()\n if len(uname) <= 0:\n return False\n\n rc = request.rc\n\n ProjectMember.objects.filter(project = rc.project,\n user = User.objects.filter(name=uname)).delete()\n return True\n\n\n@login_required\n@need_owner\n@as_json\ndef del_prj(request, name):\n if request.method != 'POST':\n return False\n \n del_name = name + '__DELETED__%s'%time.time()\n\n project = request.rc.project\n old_name = project.name\n\n project.name = del_name\n project.status = consts.PROJECT_MARK_DELETED\n project.save()\n \n svn.del_repos(old_name, del_name)\n \n return (True, 
reverse('apps.user.views.view_user', args=[]))\n\n@login_required\n@need_owner\n@as_json\ndef edit_prj(request, name):\n if request.method != 'POST':\n return False\n \n project = request.rc.project\n title = request.POST.get('t','').strip()\n \n if len(title) <= 0:\n return False\n \n license = request.POST.get('l','').strip()\n is_public = request.POST.get('pub','0').strip()\n project.title = title\n project.license = license\n project.is_public = bool(int(is_public))\n project.save()\n\n return True\n\n\n",
"step-ids": [
7,
14,
16,
17,
18
]
}
|
[
7,
14,
16,
17,
18
] |
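The record above turns on need_owner, a decorator that loads the project into the request context and refuses the view to anyone but the owner (or a superuser, spelled "supper" in that code). Below is a minimal, framework-free sketch of the same gate-then-delegate shape; every name in it is illustrative, none is taken from the record's taocode2 helpers:

import functools
from dataclasses import dataclass

@dataclass
class User:
    name: str
    supper: bool = False                 # superuser flag, matching the record's spelling

@dataclass
class Project:
    name: str
    owner: User

@dataclass
class Request:
    user: User

PROJECTS = {}                            # hypothetical in-memory project store

def need_owner(view_func):
    """Load the named project, refuse non-owners, then hand off to the view."""
    @functools.wraps(view_func)
    def _wrapped(request, *args, **kwargs):
        project = PROJECTS.get(kwargs["name"])
        if project is None:
            raise LookupError("no such project")      # stands in for Http404
        if project.owner is not request.user and not request.user.supper:
            raise PermissionError("owner required")   # stands in for a 403 response
        request.project = project                     # mirrors rc.project in the record
        return view_func(request, *args, **kwargs)
    return _wrapped

@need_owner
def project_admin(request, name):
    return "admin page for " + request.project.name

alice = User("alice")
PROJECTS["demo"] = Project("demo", owner=alice)
print(project_admin(Request(alice), name="demo"))     # -> admin page for demo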
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
curses.noecho()
curses.cbreak()
stdscr.keypad(True)
curses.curs_set(0)
<|reserved_special_token_0|>
window.keypad(1)
window.timeout(100)
<|reserved_special_token_0|>
window.addch(int(food[0]), int(food[1]), curses.ACS_PI)
<|reserved_special_token_0|>
while True:
next_key = window.getch()
key = key if next_key == -1 else next_key
if snake[0][0] in [0, height] or snake[0][1] in [0, width] or snake[0
] in snake[1:]:
curses.endwin()
quit()
new_head = [snake[0][0], snake[0][1]]
if key == curses.KEY_DOWN:
new_head[0] += 1
if key == curses.KEY_UP:
new_head[0] -= 1
if key == curses.KEY_LEFT:
new_head[1] -= 1
if key == curses.KEY_RIGHT:
new_head[1] += 1
snake.insert(0, new_head)
if snake[0] == food:
food = None
while food is None:
nf = [random.randint(1, height - 1), random.randint(1, width - 1)]
food = nf if nf not in snake else None
window.addch(food[0], food[1], curses.ACS_PI)
else:
tail = snake.pop()
window.addch(int(tail[0]), int(tail[1]), ' ')
window.addch(int(snake[0][0]), int(snake[0][1]), curses.ACS_CKBOARD)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
stdscr.keypad(True)
curses.curs_set(0)
height, width = stdscr.getmaxyx()
window = curses.newwin(height, width, 0, 0)
window.keypad(1)
window.timeout(100)
snk_x = width / 4
snk_y = height / 2
snake = [[snk_y, snk_x], [snk_y, snk_x - 1], [snk_y, snk_x - 2]]
food = [height / 2, width / 2]
window.addch(int(food[0]), int(food[1]), curses.ACS_PI)
key = curses.KEY_RIGHT
while True:
next_key = window.getch()
key = key if next_key == -1 else next_key
if snake[0][0] in [0, height] or snake[0][1] in [0, width] or snake[0
] in snake[1:]:
curses.endwin()
quit()
new_head = [snake[0][0], snake[0][1]]
if key == curses.KEY_DOWN:
new_head[0] += 1
if key == curses.KEY_UP:
new_head[0] -= 1
if key == curses.KEY_LEFT:
new_head[1] -= 1
if key == curses.KEY_RIGHT:
new_head[1] += 1
snake.insert(0, new_head)
if snake[0] == food:
food = None
while food is None:
nf = [random.randint(1, height - 1), random.randint(1, width - 1)]
food = nf if nf not in snake else None
window.addch(food[0], food[1], curses.ACS_PI)
else:
tail = snake.pop()
window.addch(int(tail[0]), int(tail[1]), ' ')
window.addch(int(snake[0][0]), int(snake[0][1]), curses.ACS_CKBOARD)
<|reserved_special_token_1|>
import random
import curses
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
stdscr.keypad(True)
curses.curs_set(0)
height, width = stdscr.getmaxyx()
window = curses.newwin(height, width, 0, 0)
window.keypad(1)
window.timeout(100)
snk_x = width / 4
snk_y = height / 2
snake = [[snk_y, snk_x], [snk_y, snk_x - 1], [snk_y, snk_x - 2]]
food = [height / 2, width / 2]
window.addch(int(food[0]), int(food[1]), curses.ACS_PI)
key = curses.KEY_RIGHT
while True:
next_key = window.getch()
key = key if next_key == -1 else next_key
if snake[0][0] in [0, height] or snake[0][1] in [0, width] or snake[0
] in snake[1:]:
curses.endwin()
quit()
new_head = [snake[0][0], snake[0][1]]
if key == curses.KEY_DOWN:
new_head[0] += 1
if key == curses.KEY_UP:
new_head[0] -= 1
if key == curses.KEY_LEFT:
new_head[1] -= 1
if key == curses.KEY_RIGHT:
new_head[1] += 1
snake.insert(0, new_head)
if snake[0] == food:
food = None
while food is None:
nf = [random.randint(1, height - 1), random.randint(1, width - 1)]
food = nf if nf not in snake else None
window.addch(food[0], food[1], curses.ACS_PI)
else:
tail = snake.pop()
window.addch(int(tail[0]), int(tail[1]), ' ')
window.addch(int(snake[0][0]), int(snake[0][1]), curses.ACS_CKBOARD)
<|reserved_special_token_1|>
import random
# library for drawing windows in the terminal
import curses
# initialize curses; initscr() returns a window object covering the whole screen
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
stdscr.keypad(True)
curses.curs_set(0)
height, width = stdscr.getmaxyx()
# create a new window of a given size
window = curses.newwin(height, width, 0, 0)
window.keypad(1)
window.timeout(100)
# snake's starting coordinates
snk_x = width/4
snk_y = height/2
# start the snake off at a length of 3 segments
snake = [
[snk_y, snk_x],
[snk_y, snk_x-1],
[snk_y, snk_x-2]
]
# food's starting position
food = [height/2, width/2]
# draw the first food in the window
window.addch(int(food[0]), int(food[1]), curses.ACS_PI)
# the snake starts out moving to the right
key = curses.KEY_RIGHT
# main loop of the snake game
while True:
next_key = window.getch()
key = key if next_key == -1 else next_key
if snake[0][0] in [0, height] or snake[0][1] in [0, width] or snake[0] in snake[1:]:
curses.endwin()
quit()
new_head = [snake[0][0], snake[0][1]]
if key == curses.KEY_DOWN:
new_head[0] += 1
if key == curses.KEY_UP:
new_head[0] -= 1
if key == curses.KEY_LEFT:
new_head[1] -= 1
if key == curses.KEY_RIGHT:
new_head[1] += 1
snake.insert(0, new_head)
if snake[0] == food:
food = None
while food is None:
nf = [ random.randint(1, height-1), random.randint(1, width-1)]
food = nf if nf not in snake else None
window.addch(food[0], food[1], curses.ACS_PI)
else:
tail = snake.pop()
window.addch(int(tail[0]), int(tail[1]), ' ')
window.addch(int(snake[0][0]), int(snake[0][1]), curses.ACS_CKBOARD)
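# A side note on the glyphs above: ACS_PI and ACS_CKBOARD come from curses'
# alternate character set and are only defined after initscr() has run. Any
# plain character works the same way; an illustrative one-liner, not part of
# the game itself:
#
#     window.addch(int(food[0]), int(food[1]), ord('*'))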
|
flexible
|
{
"blob_id": "153d37b58a10847aae1fa7dbec4c7576c3d97fb2",
"index": 3407,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncurses.noecho()\ncurses.cbreak()\nstdscr.keypad(True)\ncurses.curs_set(0)\n<mask token>\nwindow.keypad(1)\nwindow.timeout(100)\n<mask token>\nwindow.addch(int(food[0]), int(food[1]), curses.ACS_PI)\n<mask token>\nwhile True:\n next_key = window.getch()\n key = key if next_key == -1 else next_key\n if snake[0][0] in [0, height] or snake[0][1] in [0, width] or snake[0\n ] in snake[1:]:\n curses.endwin()\n quit()\n new_head = [snake[0][0], snake[0][1]]\n if key == curses.KEY_DOWN:\n new_head[0] += 1\n if key == curses.KEY_UP:\n new_head[0] -= 1\n if key == curses.KEY_LEFT:\n new_head[1] -= 1\n if key == curses.KEY_RIGHT:\n new_head[1] += 1\n snake.insert(0, new_head)\n if snake[0] == food:\n food = None\n while food is None:\n nf = [random.randint(1, height - 1), random.randint(1, width - 1)]\n food = nf if nf not in snake else None\n window.addch(food[0], food[1], curses.ACS_PI)\n else:\n tail = snake.pop()\n window.addch(int(tail[0]), int(tail[1]), ' ')\n window.addch(int(snake[0][0]), int(snake[0][1]), curses.ACS_CKBOARD)\n",
"step-3": "<mask token>\nstdscr = curses.initscr()\ncurses.noecho()\ncurses.cbreak()\nstdscr.keypad(True)\ncurses.curs_set(0)\nheight, width = stdscr.getmaxyx()\nwindow = curses.newwin(height, width, 0, 0)\nwindow.keypad(1)\nwindow.timeout(100)\nsnk_x = width / 4\nsnk_y = height / 2\nsnake = [[snk_y, snk_x], [snk_y, snk_x - 1], [snk_y, snk_x - 2]]\nfood = [height / 2, width / 2]\nwindow.addch(int(food[0]), int(food[1]), curses.ACS_PI)\nkey = curses.KEY_RIGHT\nwhile True:\n next_key = window.getch()\n key = key if next_key == -1 else next_key\n if snake[0][0] in [0, height] or snake[0][1] in [0, width] or snake[0\n ] in snake[1:]:\n curses.endwin()\n quit()\n new_head = [snake[0][0], snake[0][1]]\n if key == curses.KEY_DOWN:\n new_head[0] += 1\n if key == curses.KEY_UP:\n new_head[0] -= 1\n if key == curses.KEY_LEFT:\n new_head[1] -= 1\n if key == curses.KEY_RIGHT:\n new_head[1] += 1\n snake.insert(0, new_head)\n if snake[0] == food:\n food = None\n while food is None:\n nf = [random.randint(1, height - 1), random.randint(1, width - 1)]\n food = nf if nf not in snake else None\n window.addch(food[0], food[1], curses.ACS_PI)\n else:\n tail = snake.pop()\n window.addch(int(tail[0]), int(tail[1]), ' ')\n window.addch(int(snake[0][0]), int(snake[0][1]), curses.ACS_CKBOARD)\n",
"step-4": "import random\nimport curses\nstdscr = curses.initscr()\ncurses.noecho()\ncurses.cbreak()\nstdscr.keypad(True)\ncurses.curs_set(0)\nheight, width = stdscr.getmaxyx()\nwindow = curses.newwin(height, width, 0, 0)\nwindow.keypad(1)\nwindow.timeout(100)\nsnk_x = width / 4\nsnk_y = height / 2\nsnake = [[snk_y, snk_x], [snk_y, snk_x - 1], [snk_y, snk_x - 2]]\nfood = [height / 2, width / 2]\nwindow.addch(int(food[0]), int(food[1]), curses.ACS_PI)\nkey = curses.KEY_RIGHT\nwhile True:\n next_key = window.getch()\n key = key if next_key == -1 else next_key\n if snake[0][0] in [0, height] or snake[0][1] in [0, width] or snake[0\n ] in snake[1:]:\n curses.endwin()\n quit()\n new_head = [snake[0][0], snake[0][1]]\n if key == curses.KEY_DOWN:\n new_head[0] += 1\n if key == curses.KEY_UP:\n new_head[0] -= 1\n if key == curses.KEY_LEFT:\n new_head[1] -= 1\n if key == curses.KEY_RIGHT:\n new_head[1] += 1\n snake.insert(0, new_head)\n if snake[0] == food:\n food = None\n while food is None:\n nf = [random.randint(1, height - 1), random.randint(1, width - 1)]\n food = nf if nf not in snake else None\n window.addch(food[0], food[1], curses.ACS_PI)\n else:\n tail = snake.pop()\n window.addch(int(tail[0]), int(tail[1]), ' ')\n window.addch(int(snake[0][0]), int(snake[0][1]), curses.ACS_CKBOARD)\n",
"step-5": "import random\r\n\r\n# library to create window in the terminal\r\nimport curses \r\n\r\n# initialized curses by returning a window object\r\nstdscr = curses.initscr()\r\ncurses.noecho()\r\ncurses.cbreak()\r\nstdscr.keypad(True)\r\ncurses.curs_set(0)\r\nheight, width = stdscr.getmaxyx()\r\n\r\n# create a new window of a given size\r\nwindow = curses.newwin(height, width, 0, 0)\r\nwindow.keypad(1)\r\nwindow.timeout(100)\r\n\r\n# snake's form\r\nsnk_x = width/4\r\nsnk_y = height/2\r\n\r\n# initialize snake's size to 3\r\nsnake = [\r\n [snk_y, snk_x],\r\n [snk_y, snk_x-1],\r\n [snk_y, snk_x-2]\r\n]\r\n\r\n# food's size\r\nfood = [height/2, width/2]\r\n\r\n# add first food in the window\r\nwindow.addch(int(food[0]), int(food[1]), curses.ACS_PI)\r\n\r\n# snake initializes direction to right\r\nkey = curses.KEY_RIGHT\r\n\r\n# main of snake game \r\nwhile True:\r\n next_key = window.getch()\r\n key = key if next_key == -1 else next_key\r\n\r\n if snake[0][0] in [0, height] or snake[0][1] in [0, width] or snake[0] in snake[1:]:\r\n curses.endwin()\r\n quit()\r\n\r\n new_head = [snake[0][0], snake[0][1]]\r\n\r\n if key == curses.KEY_DOWN:\r\n new_head[0] += 1\r\n if key == curses.KEY_UP:\r\n new_head[0] -= 1\r\n if key == curses.KEY_LEFT:\r\n new_head[1] -= 1\r\n if key == curses.KEY_RIGHT:\r\n new_head[1] += 1\r\n\r\n snake.insert(0, new_head)\r\n\r\n if snake[0] == food:\r\n food = None\r\n while food is None:\r\n nf = [ random.randint(1, height-1), random.randint(1, width-1)]\r\n food = nf if nf not in snake else None\r\n window.addch(food[0], food[1], curses.ACS_PI)\r\n else:\r\n tail = snake.pop()\r\n window.addch(int(tail[0]), int(tail[1]), ' ')\r\n \r\n window.addch(int(snake[0][0]), int(snake[0][1]), curses.ACS_CKBOARD)\r\n ",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
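The snake record above does its own initscr/noecho/cbreak setup and only calls curses.endwin() on the game-over path, so an exception inside the loop would leave the terminal in raw mode. The standard library's curses.wrapper performs the same setup and guarantees teardown even on error; a minimal sketch follows, with an illustrative loop body:

import curses

def main(stdscr):
    # wrapper() has already called initscr(), noecho(), cbreak() and enabled
    # keypad mode; it restores the terminal even if this function raises.
    try:
        curses.curs_set(0)        # hide the cursor where the terminal supports it
    except curses.error:
        pass
    stdscr.timeout(100)           # non-blocking getch, as in the game loop above
    stdscr.addstr(0, 0, "press q to quit")
    while stdscr.getch() != ord("q"):
        pass

if __name__ == "__main__":
    curses.wrapper(main)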
<|reserved_special_token_0|>
class test_A1(unittest.TestCase):
def setUp(self):
self.security1 = security.Security('XXX-1234-ABCD-1234', None)
self.security2 = security.Security(None, 'kkklas8882kk23nllfjj88290')
self.security3 = security.Security('XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290')
self.part_check1 = part_manager.Part_Manager('1233', '2')
self.part_check2 = part_manager.Part_Manager(None, '5')
self.part_check3 = part_manager.Part_Manager('2222', None)
self.delivery1 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', 'NS', 'B3J2K9')
self.delivery2 = part_manager.DeliveryAddress(None, 'South Park St',
'Halifax', 'NS', 'B3J2K9')
self.delivery3 = part_manager.DeliveryAddress('Mr. Jadeja', None,
'Halifax', 'NS', 'B3J2K9')
self.delivery4 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', None, 'NS', 'B3J2K9')
self.delivery5 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', None, 'B3J2K9')
self.delivery6 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', 'NS', None)
self.auth1 = security.Security('FAKEDEALER', 'FAKEACCEESKEY')
self.auth2 = security.Security('XXX-1111-ABCD-1111',
'abcd123wxyz456qwerty78901')
self.auth3 = security.Security('XXX-2222-ABCD-2222',
'kkklas8882kk23nllfjj88292')
self.part_status1 = part_manager.Part_Manager(['1234', '1111',
'2222', '3333', '4444', 'fake_part_number'], ['1', '2', '3',
'4', '5', '6'])
def test_dealerCheck(self):
self.assertEqual(self.security1.validate_dealer(),
'Invalid Input XML Response Error: in Dealer Access Key')
self.assertEqual(self.security2.validate_dealer(),
'Invalid Input XML Response Error: in Dealer Id')
self.assertEqual(self.security3.validate_dealer(),
'Dealer details validated')
def test_dealer_auth(self):
self.assertEqual(self.auth1.isDealerAuthorized(),
'dealer not authorized.')
self.assertEqual(self.auth2.isDealerAuthorized(),
'dealer not authorized.')
self.assertEqual(self.auth3.isDealerAuthorized(),
'dealer authenticated')
<|reserved_special_token_0|>
def test_delivery(self):
self.assertEqual(self.delivery1.validate_delivery(),
'Delivery Details are good')
self.assertEqual(self.delivery2.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery3.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery4.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery5.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery6.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
def test_part_status_check(self):
self.assertEqual(self.part_status1.
SubmitPartForManufactureAndDelivery(), ['success',
'out of stock', 'no longer manufactured', 'invalid part',
'success', 'Invalid Part'])
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class test_A1(unittest.TestCase):
def setUp(self):
self.security1 = security.Security('XXX-1234-ABCD-1234', None)
self.security2 = security.Security(None, 'kkklas8882kk23nllfjj88290')
self.security3 = security.Security('XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290')
self.part_check1 = part_manager.Part_Manager('1233', '2')
self.part_check2 = part_manager.Part_Manager(None, '5')
self.part_check3 = part_manager.Part_Manager('2222', None)
self.delivery1 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', 'NS', 'B3J2K9')
self.delivery2 = part_manager.DeliveryAddress(None, 'South Park St',
'Halifax', 'NS', 'B3J2K9')
self.delivery3 = part_manager.DeliveryAddress('Mr. Jadeja', None,
'Halifax', 'NS', 'B3J2K9')
self.delivery4 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', None, 'NS', 'B3J2K9')
self.delivery5 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', None, 'B3J2K9')
self.delivery6 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', 'NS', None)
self.auth1 = security.Security('FAKEDEALER', 'FAKEACCEESKEY')
self.auth2 = security.Security('XXX-1111-ABCD-1111',
'abcd123wxyz456qwerty78901')
self.auth3 = security.Security('XXX-2222-ABCD-2222',
'kkklas8882kk23nllfjj88292')
self.part_status1 = part_manager.Part_Manager(['1234', '1111',
'2222', '3333', '4444', 'fake_part_number'], ['1', '2', '3',
'4', '5', '6'])
def test_dealerCheck(self):
self.assertEqual(self.security1.validate_dealer(),
'Invalid Input XML Response Error: in Dealer Access Key')
self.assertEqual(self.security2.validate_dealer(),
'Invalid Input XML Response Error: in Dealer Id')
self.assertEqual(self.security3.validate_dealer(),
'Dealer details validated')
def test_dealer_auth(self):
self.assertEqual(self.auth1.isDealerAuthorized(),
'dealer not authorized.')
self.assertEqual(self.auth2.isDealerAuthorized(),
'dealer not authorized.')
self.assertEqual(self.auth3.isDealerAuthorized(),
'dealer authenticated')
def test_partsCheck(self):
self.assertEqual(self.part_check1.validate_parts(),
'Part Number and Quantity are good.')
self.assertEqual(self.part_check2.validate_parts(),
'Invalid Input XML Response: Error in Part number')
self.assertEqual(self.part_check3.validate_parts(),
'Invalid Input XML Response: Error in Quantity')
def test_delivery(self):
self.assertEqual(self.delivery1.validate_delivery(),
'Delivery Details are good')
self.assertEqual(self.delivery2.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery3.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery4.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery5.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery6.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
def test_part_status_check(self):
self.assertEqual(self.part_status1.
SubmitPartForManufactureAndDelivery(), ['success',
'out of stock', 'no longer manufactured', 'invalid part',
'success', 'Invalid Part'])
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class test_A1(unittest.TestCase):
def setUp(self):
self.security1 = security.Security('XXX-1234-ABCD-1234', None)
self.security2 = security.Security(None, 'kkklas8882kk23nllfjj88290')
self.security3 = security.Security('XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290')
self.part_check1 = part_manager.Part_Manager('1233', '2')
self.part_check2 = part_manager.Part_Manager(None, '5')
self.part_check3 = part_manager.Part_Manager('2222', None)
self.delivery1 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', 'NS', 'B3J2K9')
self.delivery2 = part_manager.DeliveryAddress(None, 'South Park St',
'Halifax', 'NS', 'B3J2K9')
self.delivery3 = part_manager.DeliveryAddress('Mr. Jadeja', None,
'Halifax', 'NS', 'B3J2K9')
self.delivery4 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', None, 'NS', 'B3J2K9')
self.delivery5 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', None, 'B3J2K9')
self.delivery6 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', 'NS', None)
self.auth1 = security.Security('FAKEDEALER', 'FAKEACCEESKEY')
self.auth2 = security.Security('XXX-1111-ABCD-1111',
'abcd123wxyz456qwerty78901')
self.auth3 = security.Security('XXX-2222-ABCD-2222',
'kkklas8882kk23nllfjj88292')
self.part_status1 = part_manager.Part_Manager(['1234', '1111',
'2222', '3333', '4444', 'fake_part_number'], ['1', '2', '3',
'4', '5', '6'])
def test_dealerCheck(self):
self.assertEqual(self.security1.validate_dealer(),
'Invalid Input XML Response Error: in Dealer Access Key')
self.assertEqual(self.security2.validate_dealer(),
'Invalid Input XML Response Error: in Dealer Id')
self.assertEqual(self.security3.validate_dealer(),
'Dealer details validated')
def test_dealer_auth(self):
self.assertEqual(self.auth1.isDealerAuthorized(),
'dealer not authorized.')
self.assertEqual(self.auth2.isDealerAuthorized(),
'dealer not authorized.')
self.assertEqual(self.auth3.isDealerAuthorized(),
'dealer authenticated')
def test_partsCheck(self):
self.assertEqual(self.part_check1.validate_parts(),
'Part Number and Quantity are good.')
self.assertEqual(self.part_check2.validate_parts(),
'Invalid Input XML Response: Error in Part number')
self.assertEqual(self.part_check3.validate_parts(),
'Invalid Input XML Response: Error in Quantity')
def test_delivery(self):
self.assertEqual(self.delivery1.validate_delivery(),
'Delivery Details are good')
self.assertEqual(self.delivery2.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery3.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery4.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery5.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery6.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
def test_part_status_check(self):
self.assertEqual(self.part_status1.
SubmitPartForManufactureAndDelivery(), ['success',
'out of stock', 'no longer manufactured', 'invalid part',
'success', 'Invalid Part'])
def test_main_function(self):
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],
['2', '25']),
'Dealer is authorized, check the response in output.xml')
self.assertEqual(A1.main_function([None,
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],
['2', '25']), 'Invalid Input XML Response Error: in Dealer Id')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', None], [
'Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],
['1234', '5678'], ['2', '25']),
'Invalid Input XML Response Error: in Dealer Access Key')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], [None, '35 Streetname', 'Halifax',
'NS', 'B2T1A4'], ['1234', '5678'], ['2', '25']),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', None,
'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'], ['2', '25']),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', None, 'NS', 'B2T1A4'], ['1234', '5678'], ['2',
'25']), 'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', None, 'B2T1A4'], ['1234', '5678'],
['2', '25']),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', 'NS', None], ['1234', '5678'], ['2',
'25']), 'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['0000', '5678'],
['2', '25']),
'Dealer is authorized, check the response in output.xml')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],
['0', '25']), 'Invalid Input XML Response: Error in Quantity')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class test_A1(unittest.TestCase):
def setUp(self):
self.security1 = security.Security('XXX-1234-ABCD-1234', None)
self.security2 = security.Security(None, 'kkklas8882kk23nllfjj88290')
self.security3 = security.Security('XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290')
self.part_check1 = part_manager.Part_Manager('1233', '2')
self.part_check2 = part_manager.Part_Manager(None, '5')
self.part_check3 = part_manager.Part_Manager('2222', None)
self.delivery1 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', 'NS', 'B3J2K9')
self.delivery2 = part_manager.DeliveryAddress(None, 'South Park St',
'Halifax', 'NS', 'B3J2K9')
self.delivery3 = part_manager.DeliveryAddress('Mr. Jadeja', None,
'Halifax', 'NS', 'B3J2K9')
self.delivery4 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', None, 'NS', 'B3J2K9')
self.delivery5 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', None, 'B3J2K9')
self.delivery6 = part_manager.DeliveryAddress('Mr. Jadeja',
'South Park St', 'Halifax', 'NS', None)
self.auth1 = security.Security('FAKEDEALER', 'FAKEACCEESKEY')
self.auth2 = security.Security('XXX-1111-ABCD-1111',
'abcd123wxyz456qwerty78901')
self.auth3 = security.Security('XXX-2222-ABCD-2222',
'kkklas8882kk23nllfjj88292')
self.part_status1 = part_manager.Part_Manager(['1234', '1111',
'2222', '3333', '4444', 'fake_part_number'], ['1', '2', '3',
'4', '5', '6'])
def test_dealerCheck(self):
self.assertEqual(self.security1.validate_dealer(),
'Invalid Input XML Response Error: in Dealer Access Key')
self.assertEqual(self.security2.validate_dealer(),
'Invalid Input XML Response Error: in Dealer Id')
self.assertEqual(self.security3.validate_dealer(),
'Dealer details validated')
def test_dealer_auth(self):
self.assertEqual(self.auth1.isDealerAuthorized(),
'dealer not authorized.')
self.assertEqual(self.auth2.isDealerAuthorized(),
'dealer not authorized.')
self.assertEqual(self.auth3.isDealerAuthorized(),
'dealer authenticated')
def test_partsCheck(self):
self.assertEqual(self.part_check1.validate_parts(),
'Part Number and Quantity are good.')
self.assertEqual(self.part_check2.validate_parts(),
'Invalid Input XML Response: Error in Part number')
self.assertEqual(self.part_check3.validate_parts(),
'Invalid Input XML Response: Error in Quantity')
def test_delivery(self):
self.assertEqual(self.delivery1.validate_delivery(),
'Delivery Details are good')
self.assertEqual(self.delivery2.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery3.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery4.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery5.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(self.delivery6.validate_delivery(),
'Invalid Input XML Response: Error in Delivery Details')
def test_part_status_check(self):
self.assertEqual(self.part_status1.
SubmitPartForManufactureAndDelivery(), ['success',
'out of stock', 'no longer manufactured', 'invalid part',
'success', 'Invalid Part'])
def test_main_function(self):
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],
['2', '25']),
'Dealer is authorized, check the response in output.xml')
self.assertEqual(A1.main_function([None,
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],
['2', '25']), 'Invalid Input XML Response Error: in Dealer Id')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', None], [
'Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],
['1234', '5678'], ['2', '25']),
'Invalid Input XML Response Error: in Dealer Access Key')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], [None, '35 Streetname', 'Halifax',
'NS', 'B2T1A4'], ['1234', '5678'], ['2', '25']),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', None,
'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'], ['2', '25']),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', None, 'NS', 'B2T1A4'], ['1234', '5678'], ['2',
'25']), 'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', None, 'B2T1A4'], ['1234', '5678'],
['2', '25']),
'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', 'NS', None], ['1234', '5678'], ['2',
'25']), 'Invalid Input XML Response: Error in Delivery Details')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['0000', '5678'],
['2', '25']),
'Dealer is authorized, check the response in output.xml')
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',
'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',
'35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],
['0', '25']), 'Invalid Input XML Response: Error in Quantity')
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
import unittest
import A1
import part_manager
import security
class test_A1(unittest.TestCase):
# ----------------------------------- set up the mock data for test cases -----------------------------------
def setUp(self):
self.security1 = security.Security("XXX-1234-ABCD-1234", None)
self.security2 = security.Security(None, "kkklas8882kk23nllfjj88290")
self.security3 = security.Security("XXX-1234-ABCD-1234", "kkklas8882kk23nllfjj88290")
self.part_check1 = part_manager.Part_Manager("1233", "2")
self.part_check2 = part_manager.Part_Manager(None, "5")
self.part_check3 = part_manager.Part_Manager("2222", None)
self.delivery1 = part_manager.DeliveryAddress("Mr. Jadeja", "South Park St", "Halifax", "NS", "B3J2K9")
self.delivery2 = part_manager.DeliveryAddress(None, "South Park St", "Halifax", "NS", "B3J2K9")
self.delivery3 = part_manager.DeliveryAddress("Mr. Jadeja", None, "Halifax", "NS", "B3J2K9")
self.delivery4 = part_manager.DeliveryAddress("Mr. Jadeja", "South Park St", None, "NS", "B3J2K9")
self.delivery5 = part_manager.DeliveryAddress("Mr. Jadeja", "South Park St", "Halifax", None, "B3J2K9")
self.delivery6 = part_manager.DeliveryAddress("Mr. Jadeja", "South Park St", "Halifax", "NS", None)
self.auth1 = security.Security("FAKEDEALER", "FAKEACCEESKEY")
self.auth2 = security.Security("XXX-1111-ABCD-1111", "abcd123wxyz456qwerty78901")
self.auth3 = security.Security("XXX-2222-ABCD-2222", "kkklas8882kk23nllfjj88292")
self.part_status1 = part_manager.Part_Manager(["1234", "1111", "2222", "3333", "4444", "fake_part_number"],
["1","2","3","4","5","6"])
# ----------------------------------- Class: Security -----------------------------------
# -----------------------------------------------------------------------------------------
# ------------------------------ Method: validate_dealer -----------------------------
def test_dealerCheck(self):
self.assertEqual(self.security1.validate_dealer(), "Invalid Input XML Response Error: in Dealer Access Key")
self.assertEqual(self.security2.validate_dealer(), "Invalid Input XML Response Error: in Dealer Id")
self.assertEqual(self.security3.validate_dealer(), "Dealer details validated")
# ------------------------------ Method: isDealerAuthorized ---------------------------
def test_dealer_auth(self):
self.assertEqual(self.auth1.isDealerAuthorized(), "dealer not authorized.")
self.assertEqual(self.auth2.isDealerAuthorized(), "dealer not authorized.")
self.assertEqual(self.auth3.isDealerAuthorized(), "dealer authenticated")
# ----------------------------------- Class: part_manager --------------------------------
# ------------------------------------------------------------------------------------------
# ------------------------------ Method: validate_parts -------------------------------
def test_partsCheck(self):
self.assertEqual(self.part_check1.validate_parts(), "Part Number and Quantity are good.")
self.assertEqual(self.part_check2.validate_parts(), "Invalid Input XML Response: Error in Part number")
self.assertEqual(self.part_check3.validate_parts(), "Invalid Input XML Response: Error in Quantity")
# ------------------------------ Method: validate_delivery ----------------------------
def test_delivery(self):
self.assertEqual(self.delivery1.validate_delivery(), "Delivery Details are good")
self.assertEqual(self.delivery2.validate_delivery(), "Invalid Input XML Response: Error in Delivery Details")
self.assertEqual(self.delivery3.validate_delivery(), "Invalid Input XML Response: Error in Delivery Details")
self.assertEqual(self.delivery4.validate_delivery(), "Invalid Input XML Response: Error in Delivery Details")
self.assertEqual(self.delivery5.validate_delivery(), "Invalid Input XML Response: Error in Delivery Details")
self.assertEqual(self.delivery6.validate_delivery(), "Invalid Input XML Response: Error in Delivery Details")
# ------------------------------ Method: SubmitPartForManufactureAndDelivery -----------
def test_part_status_check(self):
self.assertEqual(self.part_status1.SubmitPartForManufactureAndDelivery(),
['success', 'out of stock', 'no longer manufactured', 'invalid part', 'success', 'Invalid Part'])
# ----------------------------------- Class: A1 -------------------------------------------
# -------------------------------------------------------------------------------------------
# ------------------------------ Method: main_function ---------------------------------
def test_main_function(self):
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],
['1234', '5678'], ['2', '25']), "Dealer is authorized, check the response in output.xml")
self.assertEqual(A1.main_function([None, 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],
['2', '25']), "Invalid Input XML Response Error: in Dealer Id")
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', None], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],
['1234', '5678'], ['2', '25']), "Invalid Input XML Response Error: in Dealer Access Key")
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], [None, '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],
['2', '25']), "Invalid Input XML Response: Error in Delivery Details")
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', None, 'Halifax', 'NS', 'B2T1A4'],
['1234', '5678'], ['2', '25']), "Invalid Input XML Response: Error in Delivery Details")
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', None, 'NS', 'B2T1A4'],
['1234', '5678'], ['2', '25']), "Invalid Input XML Response: Error in Delivery Details")
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', None, 'B2T1A4'],
['1234', '5678'], ['2', '25']), "Invalid Input XML Response: Error in Delivery Details")
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', None],
['1234', '5678'], ['2', '25']), "Invalid Input XML Response: Error in Delivery Details")
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],
["0000", '5678'], ['2', '25']), "Dealer is authorized, check the response in output.xml")
self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],
['1234', '5678'], ['0', '25']), "Invalid Input XML Response: Error in Quantity")
if __name__ == '__main__':
unittest.main()
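
# To run the suite directly (assuming this file is saved as test_A1.py next
# to the A1, part_manager and security modules it imports):
#   python -m unittest -v test_A1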
|
flexible
|
{
"blob_id": "2ba5cb1265090b42b9a4838b792a3e81b209ba1a",
"index": 3822,
"step-1": "<mask token>\n\n\nclass test_A1(unittest.TestCase):\n\n def setUp(self):\n self.security1 = security.Security('XXX-1234-ABCD-1234', None)\n self.security2 = security.Security(None, 'kkklas8882kk23nllfjj88290')\n self.security3 = security.Security('XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290')\n self.part_check1 = part_manager.Part_Manager('1233', '2')\n self.part_check2 = part_manager.Part_Manager(None, '5')\n self.part_check3 = part_manager.Part_Manager('2222', None)\n self.delivery1 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', 'NS', 'B3J2K9')\n self.delivery2 = part_manager.DeliveryAddress(None, 'South Park St',\n 'Halifax', 'NS', 'B3J2K9')\n self.delivery3 = part_manager.DeliveryAddress('Mr. Jadeja', None,\n 'Halifax', 'NS', 'B3J2K9')\n self.delivery4 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', None, 'NS', 'B3J2K9')\n self.delivery5 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', None, 'B3J2K9')\n self.delivery6 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', 'NS', None)\n self.auth1 = security.Security('FAKEDEALER', 'FAKEACCEESKEY')\n self.auth2 = security.Security('XXX-1111-ABCD-1111',\n 'abcd123wxyz456qwerty78901')\n self.auth3 = security.Security('XXX-2222-ABCD-2222',\n 'kkklas8882kk23nllfjj88292')\n self.part_status1 = part_manager.Part_Manager(['1234', '1111',\n '2222', '3333', '4444', 'fake_part_number'], ['1', '2', '3',\n '4', '5', '6'])\n\n def test_dealerCheck(self):\n self.assertEqual(self.security1.validate_dealer(),\n 'Invalid Input XML Response Error: in Dealer Access Key')\n self.assertEqual(self.security2.validate_dealer(),\n 'Invalid Input XML Response Error: in Dealer Id')\n self.assertEqual(self.security3.validate_dealer(),\n 'Dealer details validated')\n\n def test_dealer_auth(self):\n self.assertEqual(self.auth1.isDealerAuthorized(),\n 'dealer not authorized.')\n self.assertEqual(self.auth2.isDealerAuthorized(),\n 'dealer not authorized.')\n self.assertEqual(self.auth3.isDealerAuthorized(),\n 'dealer authenticated')\n <mask token>\n\n def test_delivery(self):\n self.assertEqual(self.delivery1.validate_delivery(),\n 'Delivery Details are good')\n self.assertEqual(self.delivery2.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery3.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery4.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery5.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery6.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n\n def test_part_status_check(self):\n self.assertEqual(self.part_status1.\n SubmitPartForManufactureAndDelivery(), ['success',\n 'out of stock', 'no longer manufactured', 'invalid part',\n 'success', 'Invalid Part'])\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass test_A1(unittest.TestCase):\n\n def setUp(self):\n self.security1 = security.Security('XXX-1234-ABCD-1234', None)\n self.security2 = security.Security(None, 'kkklas8882kk23nllfjj88290')\n self.security3 = security.Security('XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290')\n self.part_check1 = part_manager.Part_Manager('1233', '2')\n self.part_check2 = part_manager.Part_Manager(None, '5')\n self.part_check3 = part_manager.Part_Manager('2222', None)\n self.delivery1 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', 'NS', 'B3J2K9')\n self.delivery2 = part_manager.DeliveryAddress(None, 'South Park St',\n 'Halifax', 'NS', 'B3J2K9')\n self.delivery3 = part_manager.DeliveryAddress('Mr. Jadeja', None,\n 'Halifax', 'NS', 'B3J2K9')\n self.delivery4 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', None, 'NS', 'B3J2K9')\n self.delivery5 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', None, 'B3J2K9')\n self.delivery6 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', 'NS', None)\n self.auth1 = security.Security('FAKEDEALER', 'FAKEACCEESKEY')\n self.auth2 = security.Security('XXX-1111-ABCD-1111',\n 'abcd123wxyz456qwerty78901')\n self.auth3 = security.Security('XXX-2222-ABCD-2222',\n 'kkklas8882kk23nllfjj88292')\n self.part_status1 = part_manager.Part_Manager(['1234', '1111',\n '2222', '3333', '4444', 'fake_part_number'], ['1', '2', '3',\n '4', '5', '6'])\n\n def test_dealerCheck(self):\n self.assertEqual(self.security1.validate_dealer(),\n 'Invalid Input XML Response Error: in Dealer Access Key')\n self.assertEqual(self.security2.validate_dealer(),\n 'Invalid Input XML Response Error: in Dealer Id')\n self.assertEqual(self.security3.validate_dealer(),\n 'Dealer details validated')\n\n def test_dealer_auth(self):\n self.assertEqual(self.auth1.isDealerAuthorized(),\n 'dealer not authorized.')\n self.assertEqual(self.auth2.isDealerAuthorized(),\n 'dealer not authorized.')\n self.assertEqual(self.auth3.isDealerAuthorized(),\n 'dealer authenticated')\n\n def test_partsCheck(self):\n self.assertEqual(self.part_check1.validate_parts(),\n 'Part Number and Quantity are good.')\n self.assertEqual(self.part_check2.validate_parts(),\n 'Invalid Input XML Response: Error in Part number')\n self.assertEqual(self.part_check3.validate_parts(),\n 'Invalid Input XML Response: Error in Quantity')\n\n def test_delivery(self):\n self.assertEqual(self.delivery1.validate_delivery(),\n 'Delivery Details are good')\n self.assertEqual(self.delivery2.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery3.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery4.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery5.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery6.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n\n def test_part_status_check(self):\n self.assertEqual(self.part_status1.\n SubmitPartForManufactureAndDelivery(), ['success',\n 'out of stock', 'no longer manufactured', 'invalid part',\n 'success', 'Invalid Part'])\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass test_A1(unittest.TestCase):\n\n def setUp(self):\n self.security1 = security.Security('XXX-1234-ABCD-1234', None)\n self.security2 = security.Security(None, 'kkklas8882kk23nllfjj88290')\n self.security3 = security.Security('XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290')\n self.part_check1 = part_manager.Part_Manager('1233', '2')\n self.part_check2 = part_manager.Part_Manager(None, '5')\n self.part_check3 = part_manager.Part_Manager('2222', None)\n self.delivery1 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', 'NS', 'B3J2K9')\n self.delivery2 = part_manager.DeliveryAddress(None, 'South Park St',\n 'Halifax', 'NS', 'B3J2K9')\n self.delivery3 = part_manager.DeliveryAddress('Mr. Jadeja', None,\n 'Halifax', 'NS', 'B3J2K9')\n self.delivery4 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', None, 'NS', 'B3J2K9')\n self.delivery5 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', None, 'B3J2K9')\n self.delivery6 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', 'NS', None)\n self.auth1 = security.Security('FAKEDEALER', 'FAKEACCEESKEY')\n self.auth2 = security.Security('XXX-1111-ABCD-1111',\n 'abcd123wxyz456qwerty78901')\n self.auth3 = security.Security('XXX-2222-ABCD-2222',\n 'kkklas8882kk23nllfjj88292')\n self.part_status1 = part_manager.Part_Manager(['1234', '1111',\n '2222', '3333', '4444', 'fake_part_number'], ['1', '2', '3',\n '4', '5', '6'])\n\n def test_dealerCheck(self):\n self.assertEqual(self.security1.validate_dealer(),\n 'Invalid Input XML Response Error: in Dealer Access Key')\n self.assertEqual(self.security2.validate_dealer(),\n 'Invalid Input XML Response Error: in Dealer Id')\n self.assertEqual(self.security3.validate_dealer(),\n 'Dealer details validated')\n\n def test_dealer_auth(self):\n self.assertEqual(self.auth1.isDealerAuthorized(),\n 'dealer not authorized.')\n self.assertEqual(self.auth2.isDealerAuthorized(),\n 'dealer not authorized.')\n self.assertEqual(self.auth3.isDealerAuthorized(),\n 'dealer authenticated')\n\n def test_partsCheck(self):\n self.assertEqual(self.part_check1.validate_parts(),\n 'Part Number and Quantity are good.')\n self.assertEqual(self.part_check2.validate_parts(),\n 'Invalid Input XML Response: Error in Part number')\n self.assertEqual(self.part_check3.validate_parts(),\n 'Invalid Input XML Response: Error in Quantity')\n\n def test_delivery(self):\n self.assertEqual(self.delivery1.validate_delivery(),\n 'Delivery Details are good')\n self.assertEqual(self.delivery2.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery3.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery4.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery5.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery6.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n\n def test_part_status_check(self):\n self.assertEqual(self.part_status1.\n SubmitPartForManufactureAndDelivery(), ['success',\n 'out of stock', 'no longer manufactured', 'invalid part',\n 'success', 'Invalid Part'])\n\n def test_main_function(self):\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. 
Jane Smith',\n '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],\n ['2', '25']),\n 'Dealer is authorized, check the response in output.xml')\n self.assertEqual(A1.main_function([None,\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],\n ['2', '25']), 'Invalid Input XML Response Error: in Dealer Id')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', None], [\n 'Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],\n ['1234', '5678'], ['2', '25']),\n 'Invalid Input XML Response Error: in Dealer Access Key')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], [None, '35 Streetname', 'Halifax',\n 'NS', 'B2T1A4'], ['1234', '5678'], ['2', '25']),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', None,\n 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'], ['2', '25']),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', None, 'NS', 'B2T1A4'], ['1234', '5678'], ['2',\n '25']), 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', 'Halifax', None, 'B2T1A4'], ['1234', '5678'],\n ['2', '25']),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', 'Halifax', 'NS', None], ['1234', '5678'], ['2',\n '25']), 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['0000', '5678'],\n ['2', '25']),\n 'Dealer is authorized, check the response in output.xml')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],\n ['0', '25']), 'Invalid Input XML Response: Error in Quantity')\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass test_A1(unittest.TestCase):\n\n def setUp(self):\n self.security1 = security.Security('XXX-1234-ABCD-1234', None)\n self.security2 = security.Security(None, 'kkklas8882kk23nllfjj88290')\n self.security3 = security.Security('XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290')\n self.part_check1 = part_manager.Part_Manager('1233', '2')\n self.part_check2 = part_manager.Part_Manager(None, '5')\n self.part_check3 = part_manager.Part_Manager('2222', None)\n self.delivery1 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', 'NS', 'B3J2K9')\n self.delivery2 = part_manager.DeliveryAddress(None, 'South Park St',\n 'Halifax', 'NS', 'B3J2K9')\n self.delivery3 = part_manager.DeliveryAddress('Mr. Jadeja', None,\n 'Halifax', 'NS', 'B3J2K9')\n self.delivery4 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', None, 'NS', 'B3J2K9')\n self.delivery5 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', None, 'B3J2K9')\n self.delivery6 = part_manager.DeliveryAddress('Mr. Jadeja',\n 'South Park St', 'Halifax', 'NS', None)\n self.auth1 = security.Security('FAKEDEALER', 'FAKEACCEESKEY')\n self.auth2 = security.Security('XXX-1111-ABCD-1111',\n 'abcd123wxyz456qwerty78901')\n self.auth3 = security.Security('XXX-2222-ABCD-2222',\n 'kkklas8882kk23nllfjj88292')\n self.part_status1 = part_manager.Part_Manager(['1234', '1111',\n '2222', '3333', '4444', 'fake_part_number'], ['1', '2', '3',\n '4', '5', '6'])\n\n def test_dealerCheck(self):\n self.assertEqual(self.security1.validate_dealer(),\n 'Invalid Input XML Response Error: in Dealer Access Key')\n self.assertEqual(self.security2.validate_dealer(),\n 'Invalid Input XML Response Error: in Dealer Id')\n self.assertEqual(self.security3.validate_dealer(),\n 'Dealer details validated')\n\n def test_dealer_auth(self):\n self.assertEqual(self.auth1.isDealerAuthorized(),\n 'dealer not authorized.')\n self.assertEqual(self.auth2.isDealerAuthorized(),\n 'dealer not authorized.')\n self.assertEqual(self.auth3.isDealerAuthorized(),\n 'dealer authenticated')\n\n def test_partsCheck(self):\n self.assertEqual(self.part_check1.validate_parts(),\n 'Part Number and Quantity are good.')\n self.assertEqual(self.part_check2.validate_parts(),\n 'Invalid Input XML Response: Error in Part number')\n self.assertEqual(self.part_check3.validate_parts(),\n 'Invalid Input XML Response: Error in Quantity')\n\n def test_delivery(self):\n self.assertEqual(self.delivery1.validate_delivery(),\n 'Delivery Details are good')\n self.assertEqual(self.delivery2.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery3.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery4.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery5.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(self.delivery6.validate_delivery(),\n 'Invalid Input XML Response: Error in Delivery Details')\n\n def test_part_status_check(self):\n self.assertEqual(self.part_status1.\n SubmitPartForManufactureAndDelivery(), ['success',\n 'out of stock', 'no longer manufactured', 'invalid part',\n 'success', 'Invalid Part'])\n\n def test_main_function(self):\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. 
Jane Smith',\n '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],\n ['2', '25']),\n 'Dealer is authorized, check the response in output.xml')\n self.assertEqual(A1.main_function([None,\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],\n ['2', '25']), 'Invalid Input XML Response Error: in Dealer Id')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', None], [\n 'Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],\n ['1234', '5678'], ['2', '25']),\n 'Invalid Input XML Response Error: in Dealer Access Key')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], [None, '35 Streetname', 'Halifax',\n 'NS', 'B2T1A4'], ['1234', '5678'], ['2', '25']),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', None,\n 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'], ['2', '25']),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', None, 'NS', 'B2T1A4'], ['1234', '5678'], ['2',\n '25']), 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', 'Halifax', None, 'B2T1A4'], ['1234', '5678'],\n ['2', '25']),\n 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', 'Halifax', 'NS', None], ['1234', '5678'], ['2',\n '25']), 'Invalid Input XML Response: Error in Delivery Details')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['0000', '5678'],\n ['2', '25']),\n 'Dealer is authorized, check the response in output.xml')\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234',\n 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith',\n '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],\n ['0', '25']), 'Invalid Input XML Response: Error in Quantity')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "import unittest\nimport A1\nimport part_manager\nimport security\n\n\nclass test_A1(unittest.TestCase):\n \n# ----------------------------------- set up the mock data for test cases ----------------------------------- \n def setUp(self):\n self.security1 = security.Security(\"XXX-1234-ABCD-1234\", None)\n self.security2 = security.Security(None, \"kkklas8882kk23nllfjj88290\")\n self.security3 = security.Security(\"XXX-1234-ABCD-1234\", \"kkklas8882kk23nllfjj88290\")\n \n self.part_check1 = part_manager.Part_Manager(\"1233\", \"2\")\n self.part_check2 = part_manager.Part_Manager(None, \"5\")\n self.part_check3 = part_manager.Part_Manager(\"2222\", None)\n \n self.delivery1 = part_manager.DeliveryAddress(\"Mr. Jadeja\", \"South Park St\", \"Halifax\", \"NS\", \"B3J2K9\")\n self.delivery2 = part_manager.DeliveryAddress(None, \"South Park St\", \"Halifax\", \"NS\", \"B3J2K9\")\n self.delivery3 = part_manager.DeliveryAddress(\"Mr. Jadeja\", None, \"Halifax\", \"NS\", \"B3J2K9\")\n self.delivery4 = part_manager.DeliveryAddress(\"Mr. Jadeja\", \"South Park St\", None, \"NS\", \"B3J2K9\")\n self.delivery5 = part_manager.DeliveryAddress(\"Mr. Jadeja\", \"South Park St\", \"Halifax\", None, \"B3J2K9\")\n self.delivery6 = part_manager.DeliveryAddress(\"Mr. Jadeja\", \"South Park St\", \"Halifax\", \"NS\", None)\n \n self.auth1 = security.Security(\"FAKEDEALER\", \"FAKEACCEESKEY\")\n self.auth2 = security.Security(\"XXX-1111-ABCD-1111\", \"abcd123wxyz456qwerty78901\")\n self.auth3 = security.Security(\"XXX-2222-ABCD-2222\", \"kkklas8882kk23nllfjj88292\") \n\n self.part_status1 = part_manager.Part_Manager([\"1234\", \"1111\", \"2222\", \"3333\", \"4444\", \"fake_part_number\"], \n [\"1\",\"2\",\"3\",\"4\",\"5\",\"6\"]) \n\n \n# ----------------------------------- Class: Security ----------------------------------- \n# -----------------------------------------------------------------------------------------\n \n # ------------------------------ Method: validate_dealer ----------------------------- \n def test_dealerCheck(self):\n self.assertEqual(self.security1.validate_dealer(), \"Invalid Input XML Response Error: in Dealer Access Key\")\n self.assertEqual(self.security2.validate_dealer(), \"Invalid Input XML Response Error: in Dealer Id\")\n self.assertEqual(self.security3.validate_dealer(), \"Dealer details validated\")\n \n \n # ------------------------------ Method: isDealerAuthorized --------------------------- \n def test_dealer_auth(self):\n self.assertEqual(self.auth1.isDealerAuthorized(), \"dealer not authorized.\")\n self.assertEqual(self.auth2.isDealerAuthorized(), \"dealer not authorized.\")\n self.assertEqual(self.auth3.isDealerAuthorized(), \"dealer authenticated\")\n \n \n# ----------------------------------- Class: part_manager --------------------------------\n# ------------------------------------------------------------------------------------------\n \n # ------------------------------ Method: validate_parts ------------------------------- \n def test_partsCheck(self):\n self.assertEqual(self.part_check1.validate_parts(), \"Part Number and Quantity are good.\")\n self.assertEqual(self.part_check2.validate_parts(), \"Invalid Input XML Response: Error in Part number\")\n self.assertEqual(self.part_check3.validate_parts(), \"Invalid Input XML Response: Error in Quantity\")\n \n # ------------------------------ Method: validate_delivery ----------------------------\n def test_delivery(self):\n self.assertEqual(self.delivery1.validate_delivery(), \"Delivery Details are 
good\")\n self.assertEqual(self.delivery2.validate_delivery(), \"Invalid Input XML Response: Error in Delivery Details\")\n self.assertEqual(self.delivery3.validate_delivery(), \"Invalid Input XML Response: Error in Delivery Details\")\n self.assertEqual(self.delivery4.validate_delivery(), \"Invalid Input XML Response: Error in Delivery Details\")\n self.assertEqual(self.delivery5.validate_delivery(), \"Invalid Input XML Response: Error in Delivery Details\")\n self.assertEqual(self.delivery6.validate_delivery(), \"Invalid Input XML Response: Error in Delivery Details\")\n \n # ------------------------------ Method: SubmitPartForManufactureAndDelivery -----------\n def test_part_status_check(self):\n self.assertEqual(self.part_status1.SubmitPartForManufactureAndDelivery(), \n ['success', 'out of stock', 'no longer manufactured', 'invalid part', 'success', 'Invalid Part'])\n \n\n# ----------------------------------- Class: A1 -------------------------------------------\n# -------------------------------------------------------------------------------------------\n \n # ------------------------------ Method: main_function ---------------------------------\n def test_main_function(self):\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],\n ['1234', '5678'], ['2', '25']), \"Dealer is authorized, check the response in output.xml\")\n self.assertEqual(A1.main_function([None, 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'],\n ['2', '25']), \"Invalid Input XML Response Error: in Dealer Id\")\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', None], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],\n ['1234', '5678'], ['2', '25']), \"Invalid Input XML Response Error: in Dealer Access Key\")\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], [None, '35 Streetname', 'Halifax', 'NS', 'B2T1A4'], ['1234', '5678'], \n ['2', '25']), \"Invalid Input XML Response: Error in Delivery Details\")\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', None, 'Halifax', 'NS', 'B2T1A4'],\n ['1234', '5678'], ['2', '25']), \"Invalid Input XML Response: Error in Delivery Details\")\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', None, 'NS', 'B2T1A4'],\n ['1234', '5678'], ['2', '25']), \"Invalid Input XML Response: Error in Delivery Details\")\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', None, 'B2T1A4'], \n ['1234', '5678'], ['2', '25']), \"Invalid Input XML Response: Error in Delivery Details\")\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', None], \n ['1234', '5678'], ['2', '25']), \"Invalid Input XML Response: Error in Delivery Details\")\n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],\n [\"0000\", '5678'], ['2', '25']), \"Dealer is authorized, check the response in output.xml\") \n self.assertEqual(A1.main_function(['XXX-1234-ABCD-1234', 'kkklas8882kk23nllfjj88290'], ['Mrs. 
Jane Smith', '35 Streetname', 'Halifax', 'NS', 'B2T1A4'],\n ['1234', '5678'], ['0', '25']), \"Invalid Input XML Response: Error in Quantity\")\n\n\n \nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
6,
7,
8,
9,
11
]
}
|
[
6,
7,
8,
9,
11
] |
# -*- coding: utf-8 -*-
import scrapy
from selenium import webdriver
import datetime
class GoldpriceSpider(scrapy.Spider):
name = 'goldprice'
allowed_domains = ['g-banker.com']
start_urls = ['https://g-banker.com/']
def __init__(self):
self.browser = webdriver.PhantomJS()
self.price = None
def parse(self, response):
# print response.text
self.browser.get(response.url)
self.price = float(self.browser.find_element_by_xpath('//*[@id="J_price"]').text)
    def close(self, spider, reason):
        hour = datetime.datetime.now().hour
        if self.price is not None:
            if int(hour) < 22:
                # alert thresholds for the gold price on g-banker.com
                if self.price > 278 or self.price < 270:
                    from scrapy.mail import MailSender
                    # mailer = MailSender.from_settings(settings)  # this raised an error; cause not found
                    mailer = MailSender(
                        smtphost = "smtp.163.com",  # outgoing mail server
                        mailfrom = "[email protected]",   # sender address
                        smtpuser = "[email protected]",  # user name
                        smtppass = "yan18779865344",  # the SMTP authorization code, not the account's login password -- remember!
                        smtpport = 25   # port number
                    )
                    body = u"""
                    The gold price scraped in real time is:
                    """ + str(self.price)
                    subject = u'Real-time scraped gold price'
                    # If the body is too plain, the message may be treated as spam and blocked.
                    mailer.send(to=["[email protected]"], subject = subject.encode("utf-8"), body = body.encode("utf-8"))
def __del__(self):
self.browser.close()
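
# Usage sketch (assumes this file sits in a Scrapy project's spiders/
# package):
#   scrapy crawl goldprice
# PhantomJS support has been removed from recent Selenium releases; a
# headless Chrome is the usual drop-in substitute, e.g.:
#   from selenium.webdriver.chrome.options import Options
#   opts = Options()
#   opts.add_argument("--headless")
#   self.browser = webdriver.Chrome(options=opts)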
|
normal
|
{
"blob_id": "e59404149c739a40316ca16ab767cbc48aa9b685",
"index": 3526,
"step-1": "<mask token>\n\n\nclass GoldpriceSpider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self):\n self.browser = webdriver.PhantomJS()\n self.price = None\n\n def parse(self, response):\n self.browser.get(response.url)\n self.price = float(self.browser.find_element_by_xpath(\n '//*[@id=\"J_price\"]').text)\n\n def close(self, spider, reason):\n hour = datetime.datetime.now().hour\n if self.price != None:\n if int(hour) < 22:\n if self.price > 278 or self.price < 270:\n from scrapy.mail import MailSender\n mailer = MailSender(smtphost='smtp.163.com', mailfrom=\n '[email protected]', smtpuser=\n '[email protected]', smtppass='yan18779865344',\n smtpport=25)\n body = (\n u\"\"\"\n 实时爬取的黄金价格为:\n \"\"\"\n + str(self.price))\n subject = u'爬取的黄金实时价格'\n mailer.send(to=['[email protected]'], subject=subject.\n encode('utf-8'), body=body.encode('utf-8'))\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass GoldpriceSpider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self):\n self.browser = webdriver.PhantomJS()\n self.price = None\n\n def parse(self, response):\n self.browser.get(response.url)\n self.price = float(self.browser.find_element_by_xpath(\n '//*[@id=\"J_price\"]').text)\n\n def close(self, spider, reason):\n hour = datetime.datetime.now().hour\n if self.price != None:\n if int(hour) < 22:\n if self.price > 278 or self.price < 270:\n from scrapy.mail import MailSender\n mailer = MailSender(smtphost='smtp.163.com', mailfrom=\n '[email protected]', smtpuser=\n '[email protected]', smtppass='yan18779865344',\n smtpport=25)\n body = (\n u\"\"\"\n 实时爬取的黄金价格为:\n \"\"\"\n + str(self.price))\n subject = u'爬取的黄金实时价格'\n mailer.send(to=['[email protected]'], subject=subject.\n encode('utf-8'), body=body.encode('utf-8'))\n\n def __del__(self):\n self.browser.close()\n",
"step-3": "<mask token>\n\n\nclass GoldpriceSpider(scrapy.Spider):\n name = 'goldprice'\n allowed_domains = ['g-banker.com']\n start_urls = ['https://g-banker.com/']\n\n def __init__(self):\n self.browser = webdriver.PhantomJS()\n self.price = None\n\n def parse(self, response):\n self.browser.get(response.url)\n self.price = float(self.browser.find_element_by_xpath(\n '//*[@id=\"J_price\"]').text)\n\n def close(self, spider, reason):\n hour = datetime.datetime.now().hour\n if self.price != None:\n if int(hour) < 22:\n if self.price > 278 or self.price < 270:\n from scrapy.mail import MailSender\n mailer = MailSender(smtphost='smtp.163.com', mailfrom=\n '[email protected]', smtpuser=\n '[email protected]', smtppass='yan18779865344',\n smtpport=25)\n body = (\n u\"\"\"\n 实时爬取的黄金价格为:\n \"\"\"\n + str(self.price))\n subject = u'爬取的黄金实时价格'\n mailer.send(to=['[email protected]'], subject=subject.\n encode('utf-8'), body=body.encode('utf-8'))\n\n def __del__(self):\n self.browser.close()\n",
"step-4": "import scrapy\nfrom selenium import webdriver\nimport datetime\n\n\nclass GoldpriceSpider(scrapy.Spider):\n name = 'goldprice'\n allowed_domains = ['g-banker.com']\n start_urls = ['https://g-banker.com/']\n\n def __init__(self):\n self.browser = webdriver.PhantomJS()\n self.price = None\n\n def parse(self, response):\n self.browser.get(response.url)\n self.price = float(self.browser.find_element_by_xpath(\n '//*[@id=\"J_price\"]').text)\n\n def close(self, spider, reason):\n hour = datetime.datetime.now().hour\n if self.price != None:\n if int(hour) < 22:\n if self.price > 278 or self.price < 270:\n from scrapy.mail import MailSender\n mailer = MailSender(smtphost='smtp.163.com', mailfrom=\n '[email protected]', smtpuser=\n '[email protected]', smtppass='yan18779865344',\n smtpport=25)\n body = (\n u\"\"\"\n 实时爬取的黄金价格为:\n \"\"\"\n + str(self.price))\n subject = u'爬取的黄金实时价格'\n mailer.send(to=['[email protected]'], subject=subject.\n encode('utf-8'), body=body.encode('utf-8'))\n\n def __del__(self):\n self.browser.close()\n",
"step-5": "# -*- coding: utf-8 -*-\n\nimport scrapy\nfrom selenium import webdriver\nimport datetime\n\nclass GoldpriceSpider(scrapy.Spider):\n name = 'goldprice'\n allowed_domains = ['g-banker.com']\n start_urls = ['https://g-banker.com/']\n\n def __init__(self):\n self.browser = webdriver.PhantomJS()\n self.price = None\n\n def parse(self, response):\n # print response.text\n self.browser.get(response.url)\n self.price = float(self.browser.find_element_by_xpath('//*[@id=\"J_price\"]').text)\n\n\n def close(self,spider, reason):\n hour = datetime.datetime.now().hour\n if(self.price != None):\n if int(hour) < 22:\n if(self.price > 278 or self.price < 270):\n from scrapy.mail import MailSender\n # mailer = MailSender.from_settings(settings)# 出错了,没找到原因\n mailer = MailSender(\n smtphost = \"smtp.163.com\", # 发送邮件的服务器\n mailfrom = \"[email protected]\", # 邮件发送者\n smtpuser = \"[email protected]\", # 用户名\n smtppass = \"yan18779865344\", # 发送邮箱的密码不是你注册时的密码,而是授权码!!!切记!\n smtpport = 25 # 端口号\n )\n\n body = u\"\"\"\n 实时爬取的黄金价格为:\n \"\"\" + str(self.price)\n subject = u'爬取的黄金实时价格'\n # 如果说发送的内容太过简单的话,很可能会被当做垃圾邮件给禁止发送。\n mailer.send(to=[\"[email protected]\"], subject = subject.encode(\"utf-8\"), body = body.encode(\"utf-8\"))\n\n def __del__(self):\n self.browser.close()\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
#!/bin/python3
# -*- coding: utf-8 -*-
import websocket
import json
import time
from loraCrypto import LoRaCrypto
from binascii import hexlify
'''
An unencrypted message
{
cmd: 'tx';
EUI: string;
port: number;
data: string
}
An encrypted message
{
cmd: 'tx';
EUI: string;
port: number;
encdata: string;
seqno: number;
}
'''
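
# For reference, a concrete encrypted message as assembled by main() below
# (the encdata value here is illustrative, not a real ciphertext):
# {
#     "cmd": "tx",
#     "EUI": "BE7A0000000005D2",
#     "port": 1,
#     "encdata": "9a3f1c...",
#     "seqno": 4739
# }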
GATEWAY_ID = "be7a0029"
TOKEN = "7AXCO2-Kkle42YGVVKvmmQ"
# Target device information
EUI = "BE7A0000000005D2"
ADDR = "00aa1174"
LATEST_SEQ = 4739
APP_SKEY = "2b7e151628aed2a6abf7158809cf4f3c"
# File for the device to download
FILE_NAME = "lora.bin"
PACKET_SIZE = 50
sendData = {}
def main():
ws = websocket.WebSocket()
ws.connect("wss://www.loriot.io/app?id="+GATEWAY_ID+"&token="+TOKEN)
lc = LoRaCrypto()
with open(FILE_NAME, "rb") as downloadFile:
        binData = downloadFile.read()
count = len(binData) // PACKET_SIZE
sendData["cmd"] = "tx"
sendData["EUI"] = EUI
sendData["port"] = 1
    seq = LATEST_SEQ
print("Upload start!")
    # count+1 packets cover the whole file; note the final packet is empty
    # when the file size is an exact multiple of PACKET_SIZE
    for i in range(count+1):
packetBin = binData[i*PACKET_SIZE:i*PACKET_SIZE+PACKET_SIZE]
packetStr = hexlify(packetBin).decode()
packetEncStr = lc.PayloadEncrypt(packetStr, APP_SKEY, ADDR, 1, seq)
sendData["encdata"] = packetEncStr
sendData["seqno"] = seq
print("Packet %d:" % i)
print("Before encrypt:")
print(packetStr)
print("After encrypt:")
print(packetEncStr)
print("Sequence is %d" % seq)
ws.send(json.dumps(sendData))
seq += 1
time.sleep(10)
print("Upload finish!")
ws.close()
if __name__ == "__main__":
from server.app.libs.loraencrypto import wrap_data
print wrap_data('he', 'BX32903', 20)
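
# Rough sanity check for the transfer time (assuming, say, a 1 KiB lora.bin):
#   count = 1024 // 50 = 20, so range(count+1) sends 21 packets, and at one
#   packet every 10 seconds the upload takes about 3.5 minutes.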
|
normal
|
{
"blob_id": "3683b1f799fa315d736e4b62c9c093360afa893f",
"index": 2052,
"step-1": "# -*- coding: utf-8 -*-\n#!/bin/python3\nimport websocket\nimport json\nimport time\nfrom loraCrypto import LoRaCrypto\nfrom binascii import hexlify\n\n'''\n没有加密的数据\n{\n\tcmd: 'tx';\n\tEUI: string;\n\tport: number;\n\tdata: string\n}\n\n加密的数据\n{\n\tcmd: 'tx';\n\tEUI: string;\n\tport: number;\n\tencdata: string;\n\tseqno: number;\n}\n'''\n\nGATEWAY_ID = \"be7a0029\"\nTOKEN = \"7AXCO2-Kkle42YGVVKvmmQ\"\n\n# 目标设备信息\nEUI = \"BE7A0000000005D2\"\nADDR = \"00aa1174\"\nLASTEST_SEQ = 4739 \nAPP_SKEY = \"2b7e151628aed2a6abf7158809cf4f3c\"\n\n# 需要下载的文件\nFILE_NAME = \"lora.bin\" \nPACKET_SIZE = 50\n\n\nsendData = {}\n\ndef main():\n ws = websocket.WebSocket()\n ws.connect(\"wss://www.loriot.io/app?id=\"+GATEWAY_ID+\"&token=\"+TOKEN)\n lc = LoRaCrypto()\n\n with open(FILE_NAME, \"rb\") as downloadFile:\n binData =downloadFile.read()\n\n count = len(binData) // PACKET_SIZE\n\n sendData[\"cmd\"] = \"tx\"\n sendData[\"EUI\"] = EUI\n sendData[\"port\"] = 1\n seq = LASTEST_SEQ\n\n print(\"Upload start!\")\n for i in range(count+1):\n packetBin = binData[i*PACKET_SIZE:i*PACKET_SIZE+PACKET_SIZE]\n packetStr = hexlify(packetBin).decode()\n packetEncStr = lc.PayloadEncrypt(packetStr, APP_SKEY, ADDR, 1, seq)\n sendData[\"encdata\"] = packetEncStr\n sendData[\"seqno\"] = seq\n \t\t\n print(\"Packet %d:\" % i)\n print(\"Before encrypt:\")\n print(packetStr)\n print(\"After encrypt:\")\n print(packetEncStr)\n print(\"Sequence is %d\" % seq)\n ws.send(json.dumps(sendData))\n seq += 1\n time.sleep(10)\n \n print(\"Upload finish!\")\n ws.close()\n\nif __name__ == \"__main__\":\n from server.app.libs.loraencrypto import wrap_data\n print wrap_data('he', 'BX32903', 20)\n\n\n\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
class Fail(Exception):
def __init__(self, message):
super().__init__(message)
class Student:
def __init__(self, rollNo, name, marks):
self.rollNo = rollNo
self.name = name
self.marks = marks
def displayDetails(self):
print('{} \t {} \t {}'.format(self.name, self.rollNo, self.marks))
        # raise and handle Fail locally so the report continues for the
        # remaining students
        try:
if self.marks < 40:
raise Fail('Student {} has Scored {} marks and has Failed '
.format(self.name, self.marks))
except Fail as f:
print(f)
myStudentList = []
num = int(input('Enter the number of Students : '))
for i in range(num):
rollNo, name, marks = input('Enter Roll-no,Name,Marks of Student {} : '
.format(i + 1)).split(',')
print('----------------------------------------')
marks = int(marks)
myStudentList.append(Student(rollNo, name, marks))
print('DETAILS OF STUDENTS ARE : ')
for i in range(num):
myStudentList[i].displayDetails()
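
# Illustrative session (input values are made up):
#   Enter the number of Students : 1
#   Enter Roll-no,Name,Marks of Student 1 : 7,Asha,35
#   ----------------------------------------
#   DETAILS OF STUDENTS ARE :
#   Asha 	 7 	 35
#   Student Asha has Scored 35 marks and has Failed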
|
flexible
|
{
"blob_id": "ddf074e400551d2c147d898fe876a31d13a72699",
"index": 5324,
"step-1": "<mask token>\n\n\nclass Student:\n <mask token>\n\n def displayDetails(self):\n print('{} \\t {} \\t {}'.format(self.name, self.rollNo, self.marks))\n try:\n if self.marks < 40:\n raise Fail('Student {} has Scored {} marks and has Failed '\n .format(self.name, self.marks))\n except Fail as f:\n print(f)\n\n\n<mask token>\n",
"step-2": "class Fail(Exception):\n\n def __init__(self, message):\n super().__init__(message)\n\n\nclass Student:\n\n def __init__(self, rollNo, name, marks):\n self.rollNo = rollNo\n self.name = name\n self.marks = marks\n\n def displayDetails(self):\n print('{} \\t {} \\t {}'.format(self.name, self.rollNo, self.marks))\n try:\n if self.marks < 40:\n raise Fail('Student {} has Scored {} marks and has Failed '\n .format(self.name, self.marks))\n except Fail as f:\n print(f)\n\n\n<mask token>\n",
"step-3": "class Fail(Exception):\n\n def __init__(self, message):\n super().__init__(message)\n\n\nclass Student:\n\n def __init__(self, rollNo, name, marks):\n self.rollNo = rollNo\n self.name = name\n self.marks = marks\n\n def displayDetails(self):\n print('{} \\t {} \\t {}'.format(self.name, self.rollNo, self.marks))\n try:\n if self.marks < 40:\n raise Fail('Student {} has Scored {} marks and has Failed '\n .format(self.name, self.marks))\n except Fail as f:\n print(f)\n\n\n<mask token>\nfor i in range(num):\n rollNo, name, marks = input('Enter Roll-no,Name,Marks of Student {} : '\n .format(i + 1)).split(',')\n print('----------------------------------------')\n marks = int(marks)\n myStudentList.append(Student(rollNo, name, marks))\nprint('DETAILS OF STUDENTS ARE : ')\nfor i in range(num):\n myStudentList[i].displayDetails()\n",
"step-4": "class Fail(Exception):\n\n def __init__(self, message):\n super().__init__(message)\n\n\nclass Student:\n\n def __init__(self, rollNo, name, marks):\n self.rollNo = rollNo\n self.name = name\n self.marks = marks\n\n def displayDetails(self):\n print('{} \\t {} \\t {}'.format(self.name, self.rollNo, self.marks))\n try:\n if self.marks < 40:\n raise Fail('Student {} has Scored {} marks and has Failed '\n .format(self.name, self.marks))\n except Fail as f:\n print(f)\n\n\nmyStudentList = []\nnum = int(input('Enter the number of Students : '))\nfor i in range(num):\n rollNo, name, marks = input('Enter Roll-no,Name,Marks of Student {} : '\n .format(i + 1)).split(',')\n print('----------------------------------------')\n marks = int(marks)\n myStudentList.append(Student(rollNo, name, marks))\nprint('DETAILS OF STUDENTS ARE : ')\nfor i in range(num):\n myStudentList[i].displayDetails()\n",
"step-5": null,
"step-ids": [
2,
5,
6,
7
]
}
|
[
2,
5,
6,
7
] |
import numpy as np
import matplotlib.pyplot as plt
import networkx as nx
import time
import sys
class ConsensusSimulation:
"""Class to model a general consensus problem
see DOI: 10.1109/JPROC.2006.887293"""
def __init__(self,
topology,
dynamics,
dynamics_args,
time_step=0.01,
x_init=None,
convergence_warning=True,
delay=0):
# check arguments are of the
# correct form
if(isinstance(topology,nx.Graph)):
self.graph = topology
self.size = len(self.graph)
else:
print("Argument Error: topology must be type"
, type(nx.Graph()))
        if(callable(dynamics)):
            self.f = dynamics
            # store the extra arguments as a tuple so they can be
            # unpacked into the dynamics via self.f(x, *self.f_arg)
            self.f_arg = tuple(dynamics_args)
        else:
            print("Argument Error: dynamics must be a function")
self.dt = time_step
self.tau = delay
        # set up initial vector to
        # 1,2,3,...,n when none is supplied
        if x_init is None:
            self.x = np.linspace(1,self.size,self.size)
            self.x = self.x.reshape(self.size,1)
        else:
            self.x = x_init.copy().reshape(self.size,1)
# The Laplacian matrix, quite the building block
# for the algorithms
self.L = nx.laplacian_matrix(self.graph).todense()
self.X = list()
self.T = list()
        # a disconnected graph won't converge
        # (maybe there's some algorithm that will,
        # though...)
        self.warn = convergence_warning
        self.d_max = max(np.array(self.graph.degree)[:,1])
        # sufficient delay bound for convergence, tau < pi/(4*d_max),
        # from the survey cited in the class docstring
        self.tau_max = (np.pi)/(4*self.d_max)
def disagreement(self):
"""Returns the 'error'/inhomogeneity in the
decision vector"""
return 0.5*(np.dot(np.dot(np.transpose(self.x),self.L),self.x)).item(0)
def agreement(self,tol=1e-6):
"""Test for convergence"""
if(self.disagreement()<tol):
return True
else:
return False
def run_sim(self,record_all=False,update_every=1.0):
"""run the core simulation"""
t=0
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
self.X.append(self.x)
self.T.append(0)
start = time.time()
time_since_last_update = 0.0
progress = 1
while self.agreement() == False:
start_it = time.time()
if(t==0 and self.warn and not nx.is_connected(self.graph)):
print("Graph not connected, consensus algorithm will probably not converge!")
print("Simulating to 5 seconds...")
flag = True
if(flag and time.time()-start>5):
break
            # core simulation done here: a very simple (forward-Euler)
            # discretisation, x_{k+1} = x_k + dt*f(x_k)
            self.x = self.x+self.dt*self.f(self.x,*self.f_arg)
# odd way to test for 1,2,3,etc
# when arg is float
if (record_all):
self.X.append(self.x)
self.T.append(time.time()-start)
else:
if (t-np.floor(t)<1e-2):
self.X.append(self.x)
self.T.append(time.time()-start)
t = t+self.dt
end = time.time()-start_it
time_since_last_update += end
if time_since_last_update >= update_every:
sys.stdout.write("\r" + "Iteration: {}, disagreement: {}, time: {}".format(progress,self.disagreement(),time.time()-start))
sys.stdout.flush()
time_since_last_update = 0.0
progress += 1
print("")
end = time.time()
return self.T[-1]
def sim_delay(self, delay = 1, runtime=100):
t=0
self.tau=delay
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
for i in range(0,delay+1):
self.X.append(self.x)
self.T.append(0)
start = time.time()
while self.agreement() == False:
if (self.T[-1] > runtime):
break
if (t==0 and self.warn and not nx.is_connected(self.graph)):
print("Graph not connected, consensus algorithm will probably not converge!")
print("Simulating to 5 seconds...")
flag = True
if(flag and time.time()-start>5):
break
            # core simulation done here
            # very simple discretisation, driven by the state
            # recorded `delay` steps in the past
self.x = self.X[-1]
if (len(self.X)-delay<0):
pass
else:
index = len(self.X)-delay
self.x = self.X[-1]+self.dt*self.f(self.X[index],*self.f_arg)
# odd way to test for 1,2,3,etc
# when arg is float
self.X.append(self.x)
self.T.append(time.time()-start)
t = t+self.dt
end = time.time()
return self.T[-1]
def plot(self, weight_average=False):
"""Show the convergence analysis"""
        if(len(self.X)==0 or len(self.T)==0):
            print("Nothing to plot...")
            return
        x = np.array(self.X)
for i in range(0,x.shape[1]):
plt.plot(self.T,x[:,i,0])
if(weight_average):
w_i = np.zeros(self.size)
s = sum(np.array(self.graph.degree)[:,1])
x = self.x_init
for i in nx.nodes(self.graph):
w_i[i] = self.graph.degree(i)/s
x[i] = x[i]*w_i[i]
plt.plot(np.linspace(0,self.T[-1],10),np.zeros(10)+sum(x), label="Connected graph consensus: "+str(sum(x)),color='red',marker='s')
else:
plt.plot(np.linspace(0,self.T[-1],10),np.zeros(10)+np.mean(self.x_init), label="Connected graph consensus: "+str(round(np.mean(self.x_init),3)),color='red',marker='s')
plt.grid()
plt.xlabel("Time (seconds)")
plt.ylabel("State")
plt.title("Convergence of consensus algorithm")
plt.legend()
def print_delay(self):
print("Delay in seconds")
return self.dt*self.tau
    def delay_stable_max(self):
        # networkx has no maximum_degree helper; take the largest
        # (node, degree) pair from the degree view instead
        d = max(self.graph.degree, key=lambda nd: nd[1])
        return (np.pi)/(4*d[1])
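
# A minimal usage sketch (not part of the original file; the graph and
# parameters below are illustrative). The standard consensus protocol is
# x' = -L x, so the dynamics function and its extra argument can be wired
# up like this:
#
#   import numpy as np
#   import networkx as nx
#   g = nx.erdos_renyi_graph(20, 0.3)
#   L = nx.laplacian_matrix(g).todense()
#   sim = ConsensusSimulation(g, lambda x, L: -np.dot(L, x), (L,))
#   sim.run_sim()
#   sim.plot()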
|
normal
|
{
"blob_id": "3164eab8dc221149c9f865645edf9991d810d2ac",
"index": 8698,
"step-1": "<mask token>\n\n\nclass ConsensusSimulation:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def run_sim(self, record_all=False, update_every=1.0):\n \"\"\"run the core simulation\"\"\"\n t = 0\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n time_since_last_update = 0.0\n progress = 1\n while self.agreement() == False:\n start_it = time.time()\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.x + self.dt * self.f(self.x, *self.f_arg)\n if record_all:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n elif t - np.floor(t) < 0.01:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time() - start_it\n time_since_last_update += end\n if time_since_last_update >= update_every:\n sys.stdout.write('\\r' +\n 'Iteration: {}, disagreement: {}, time: {}'.format(\n progress, self.disagreement(), time.time() - start))\n sys.stdout.flush()\n time_since_last_update = 0.0\n progress += 1\n print('')\n end = time.time()\n return self.T[-1]\n\n def sim_delay(self, delay=1, runtime=100):\n t = 0\n self.tau = delay\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n for i in range(0, delay + 1):\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n while self.agreement() == False:\n if self.T[-1] > runtime:\n break\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.X[-1]\n if len(self.X) - delay < 0:\n pass\n else:\n index = len(self.X) - delay\n self.x = self.X[-1] + self.dt * self.f(self.X[index], *self\n .f_arg)\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time()\n return self.T[-1]\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ConsensusSimulation:\n <mask token>\n\n def __init__(self, topology, dynamics, dynamics_args, time_step=0.01,\n x_init=None, convergence_warning=True, delay=0):\n if isinstance(topology, nx.Graph):\n self.graph = topology\n self.size = len(self.graph)\n else:\n print('Argument Error: topology must be type', type(nx.Graph()))\n if callable(dynamics):\n self.f = dynamics\n if len(dynamics_args) == 1:\n self.f_arg = dynamics_args, 1\n self.f_arg = dynamics_args\n else:\n print('Argument Error: dynamics must be a function')\n self.dt = time_step\n self.tau = delay\n if not isinstance(x_init, type(np.ones(1))) and x_init == None:\n self.x = np.linspace(1, self.size, self.size)\n self.x = self.x.reshape(self.size, 1)\n else:\n self.x = x_init.copy().reshape(self.size, 1)\n self.L = nx.laplacian_matrix(self.graph).todense()\n self.X = list()\n self.T = list()\n self.warn = convergence_warning\n self.d_max = max(np.array(self.graph.degree)[:, 1])\n self.tau_max = np.pi / (4 * self.d_max)\n\n def disagreement(self):\n \"\"\"Returns the 'error'/inhomogeneity in the\n decision vector\"\"\"\n return 0.5 * np.dot(np.dot(np.transpose(self.x), self.L), self.x).item(\n 0)\n\n def agreement(self, tol=1e-06):\n \"\"\"Test for convergence\"\"\"\n if self.disagreement() < tol:\n return True\n else:\n return False\n\n def run_sim(self, record_all=False, update_every=1.0):\n \"\"\"run the core simulation\"\"\"\n t = 0\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n time_since_last_update = 0.0\n progress = 1\n while self.agreement() == False:\n start_it = time.time()\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.x + self.dt * self.f(self.x, *self.f_arg)\n if record_all:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n elif t - np.floor(t) < 0.01:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time() - start_it\n time_since_last_update += end\n if time_since_last_update >= update_every:\n sys.stdout.write('\\r' +\n 'Iteration: {}, disagreement: {}, time: {}'.format(\n progress, self.disagreement(), time.time() - start))\n sys.stdout.flush()\n time_since_last_update = 0.0\n progress += 1\n print('')\n end = time.time()\n return self.T[-1]\n\n def sim_delay(self, delay=1, runtime=100):\n t = 0\n self.tau = delay\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n for i in range(0, delay + 1):\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n while self.agreement() == False:\n if self.T[-1] > runtime:\n break\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.X[-1]\n if len(self.X) - delay < 0:\n pass\n else:\n index = len(self.X) - delay\n self.x = self.X[-1] + self.dt * self.f(self.X[index], *self\n .f_arg)\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time()\n return self.T[-1]\n <mask token>\n\n def print_delay(self):\n print('Delay in seconds')\n return self.dt * self.tau\n\n def delay_stable_max(self):\n d = 
maximum_degree(self.graph)\n return np.pi / (4 * d[1])\n",
"step-3": "<mask token>\n\n\nclass ConsensusSimulation:\n <mask token>\n\n def __init__(self, topology, dynamics, dynamics_args, time_step=0.01,\n x_init=None, convergence_warning=True, delay=0):\n if isinstance(topology, nx.Graph):\n self.graph = topology\n self.size = len(self.graph)\n else:\n print('Argument Error: topology must be type', type(nx.Graph()))\n if callable(dynamics):\n self.f = dynamics\n if len(dynamics_args) == 1:\n self.f_arg = dynamics_args, 1\n self.f_arg = dynamics_args\n else:\n print('Argument Error: dynamics must be a function')\n self.dt = time_step\n self.tau = delay\n if not isinstance(x_init, type(np.ones(1))) and x_init == None:\n self.x = np.linspace(1, self.size, self.size)\n self.x = self.x.reshape(self.size, 1)\n else:\n self.x = x_init.copy().reshape(self.size, 1)\n self.L = nx.laplacian_matrix(self.graph).todense()\n self.X = list()\n self.T = list()\n self.warn = convergence_warning\n self.d_max = max(np.array(self.graph.degree)[:, 1])\n self.tau_max = np.pi / (4 * self.d_max)\n\n def disagreement(self):\n \"\"\"Returns the 'error'/inhomogeneity in the\n decision vector\"\"\"\n return 0.5 * np.dot(np.dot(np.transpose(self.x), self.L), self.x).item(\n 0)\n\n def agreement(self, tol=1e-06):\n \"\"\"Test for convergence\"\"\"\n if self.disagreement() < tol:\n return True\n else:\n return False\n\n def run_sim(self, record_all=False, update_every=1.0):\n \"\"\"run the core simulation\"\"\"\n t = 0\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n time_since_last_update = 0.0\n progress = 1\n while self.agreement() == False:\n start_it = time.time()\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.x + self.dt * self.f(self.x, *self.f_arg)\n if record_all:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n elif t - np.floor(t) < 0.01:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time() - start_it\n time_since_last_update += end\n if time_since_last_update >= update_every:\n sys.stdout.write('\\r' +\n 'Iteration: {}, disagreement: {}, time: {}'.format(\n progress, self.disagreement(), time.time() - start))\n sys.stdout.flush()\n time_since_last_update = 0.0\n progress += 1\n print('')\n end = time.time()\n return self.T[-1]\n\n def sim_delay(self, delay=1, runtime=100):\n t = 0\n self.tau = delay\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n for i in range(0, delay + 1):\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n while self.agreement() == False:\n if self.T[-1] > runtime:\n break\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.X[-1]\n if len(self.X) - delay < 0:\n pass\n else:\n index = len(self.X) - delay\n self.x = self.X[-1] + self.dt * self.f(self.X[index], *self\n .f_arg)\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time()\n return self.T[-1]\n\n def plot(self, weight_average=False):\n \"\"\"Show the convergence analysis\"\"\"\n if len(self.X) == 0 or len(self.T) == 0:\n print('Nothing 
to plot...')\n x = np.array(self.X)\n for i in range(0, x.shape[1]):\n plt.plot(self.T, x[:, i, 0])\n if weight_average:\n w_i = np.zeros(self.size)\n s = sum(np.array(self.graph.degree)[:, 1])\n x = self.x_init\n for i in nx.nodes(self.graph):\n w_i[i] = self.graph.degree(i) / s\n x[i] = x[i] * w_i[i]\n plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + sum(x),\n label='Connected graph consensus: ' + str(sum(x)), color=\n 'red', marker='s')\n else:\n plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + np.mean\n (self.x_init), label='Connected graph consensus: ' + str(\n round(np.mean(self.x_init), 3)), color='red', marker='s')\n plt.grid()\n plt.xlabel('Time (seconds)')\n plt.ylabel('State')\n plt.title('Convergence of consensus algorithm')\n plt.legend()\n\n def print_delay(self):\n print('Delay in seconds')\n return self.dt * self.tau\n\n def delay_stable_max(self):\n d = maximum_degree(self.graph)\n return np.pi / (4 * d[1])\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\nimport networkx as nx\nimport time\nimport sys\n\n\nclass ConsensusSimulation:\n \"\"\"Class to model a general consensus problem\n see DOI: 10.1109/JPROC.2006.887293\"\"\"\n\n def __init__(self, topology, dynamics, dynamics_args, time_step=0.01,\n x_init=None, convergence_warning=True, delay=0):\n if isinstance(topology, nx.Graph):\n self.graph = topology\n self.size = len(self.graph)\n else:\n print('Argument Error: topology must be type', type(nx.Graph()))\n if callable(dynamics):\n self.f = dynamics\n if len(dynamics_args) == 1:\n self.f_arg = dynamics_args, 1\n self.f_arg = dynamics_args\n else:\n print('Argument Error: dynamics must be a function')\n self.dt = time_step\n self.tau = delay\n if not isinstance(x_init, type(np.ones(1))) and x_init == None:\n self.x = np.linspace(1, self.size, self.size)\n self.x = self.x.reshape(self.size, 1)\n else:\n self.x = x_init.copy().reshape(self.size, 1)\n self.L = nx.laplacian_matrix(self.graph).todense()\n self.X = list()\n self.T = list()\n self.warn = convergence_warning\n self.d_max = max(np.array(self.graph.degree)[:, 1])\n self.tau_max = np.pi / (4 * self.d_max)\n\n def disagreement(self):\n \"\"\"Returns the 'error'/inhomogeneity in the\n decision vector\"\"\"\n return 0.5 * np.dot(np.dot(np.transpose(self.x), self.L), self.x).item(\n 0)\n\n def agreement(self, tol=1e-06):\n \"\"\"Test for convergence\"\"\"\n if self.disagreement() < tol:\n return True\n else:\n return False\n\n def run_sim(self, record_all=False, update_every=1.0):\n \"\"\"run the core simulation\"\"\"\n t = 0\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n time_since_last_update = 0.0\n progress = 1\n while self.agreement() == False:\n start_it = time.time()\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.x + self.dt * self.f(self.x, *self.f_arg)\n if record_all:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n elif t - np.floor(t) < 0.01:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time() - start_it\n time_since_last_update += end\n if time_since_last_update >= update_every:\n sys.stdout.write('\\r' +\n 'Iteration: {}, disagreement: {}, time: {}'.format(\n progress, self.disagreement(), time.time() - start))\n sys.stdout.flush()\n time_since_last_update = 0.0\n progress += 1\n print('')\n end = time.time()\n return self.T[-1]\n\n def sim_delay(self, delay=1, runtime=100):\n t = 0\n self.tau = delay\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n for i in range(0, delay + 1):\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n while self.agreement() == False:\n if self.T[-1] > runtime:\n break\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.X[-1]\n if len(self.X) - delay < 0:\n pass\n else:\n index = len(self.X) - delay\n self.x = self.X[-1] + self.dt * self.f(self.X[index], *self\n .f_arg)\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = 
time.time()\n return self.T[-1]\n\n def plot(self, weight_average=False):\n \"\"\"Show the convergence analysis\"\"\"\n if len(self.X) == 0 or len(self.T) == 0:\n print('Nothing to plot...')\n x = np.array(self.X)\n for i in range(0, x.shape[1]):\n plt.plot(self.T, x[:, i, 0])\n if weight_average:\n w_i = np.zeros(self.size)\n s = sum(np.array(self.graph.degree)[:, 1])\n x = self.x_init\n for i in nx.nodes(self.graph):\n w_i[i] = self.graph.degree(i) / s\n x[i] = x[i] * w_i[i]\n plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + sum(x),\n label='Connected graph consensus: ' + str(sum(x)), color=\n 'red', marker='s')\n else:\n plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + np.mean\n (self.x_init), label='Connected graph consensus: ' + str(\n round(np.mean(self.x_init), 3)), color='red', marker='s')\n plt.grid()\n plt.xlabel('Time (seconds)')\n plt.ylabel('State')\n plt.title('Convergence of consensus algorithm')\n plt.legend()\n\n def print_delay(self):\n print('Delay in seconds')\n return self.dt * self.tau\n\n def delay_stable_max(self):\n d = maximum_degree(self.graph)\n return np.pi / (4 * d[1])\n",
"step-5": "import numpy as np\nimport matplotlib.pyplot as plt\nimport networkx as nx\nimport time\nimport sys\n\nclass ConsensusSimulation:\n \"\"\"Class to model a general consensus problem\n see DOI: 10.1109/JPROC.2006.887293\"\"\"\n def __init__(self,\n topology,\n dynamics,\n dynamics_args,\n time_step=0.01,\n x_init=None,\n convergence_warning=True,\n delay=0):\n # check arguments are of the\n # correct form\n if(isinstance(topology,nx.Graph)):\n self.graph = topology\n self.size = len(self.graph)\n else:\n print(\"Argument Error: topology must be type\"\n , type(nx.Graph()))\n if(callable(dynamics)):\n self.f = dynamics\n if(len(dynamics_args)==1):\n self.f_arg = (dynamics_args,1)\n self.f_arg = dynamics_args\n else:\n print(\"Argument Error: dynamics must be a function\")\n self.dt = time_step\n self.tau = delay\n # set up initial vector to\n # 1,2,3,...,n\n if(not isinstance(x_init, type(np.ones(1))) and x_init==None):\n self.x = np.linspace(1,self.size,self.size)\n self.x = self.x.reshape(self.size,1)\n else:\n self.x = x_init.copy().reshape(self.size,1)\n # The Laplacian matrix, quite the building block\n # for the algorithms\n self.L = nx.laplacian_matrix(self.graph).todense()\n self.X = list()\n self.T = list()\n # connected graph won't converge\n # maybe there's some algorithm that will\n # though...\n self.warn = convergence_warning\n\n self.d_max = max(np.array(self.graph.degree)[:,1])\n self.tau_max = (np.pi)/(4*self.d_max)\n\n def disagreement(self):\n \"\"\"Returns the 'error'/inhomogeneity in the\n decision vector\"\"\"\n return 0.5*(np.dot(np.dot(np.transpose(self.x),self.L),self.x)).item(0)\n\n def agreement(self,tol=1e-6):\n \"\"\"Test for convergence\"\"\"\n if(self.disagreement()<tol):\n return True\n else:\n return False\n\n def run_sim(self,record_all=False,update_every=1.0):\n \"\"\"run the core simulation\"\"\"\n t=0\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n time_since_last_update = 0.0\n progress = 1\n while self.agreement() == False:\n start_it = time.time()\n if(t==0 and self.warn and not nx.is_connected(self.graph)):\n print(\"Graph not connected, consensus algorithm will probably not converge!\")\n print(\"Simulating to 5 seconds...\")\n flag = True\n if(flag and time.time()-start>5):\n break\n # core simulation done here\n # very simple discretisation...\n self.x = self.x+self.dt*self.f(self.x,*self.f_arg)\n # odd way to test for 1,2,3,etc\n # when arg is float\n if (record_all):\n self.X.append(self.x)\n self.T.append(time.time()-start)\n else:\n if (t-np.floor(t)<1e-2):\n self.X.append(self.x)\n self.T.append(time.time()-start)\n t = t+self.dt\n end = time.time()-start_it\n time_since_last_update += end\n if time_since_last_update >= update_every:\n sys.stdout.write(\"\\r\" + \"Iteration: {}, disagreement: {}, time: {}\".format(progress,self.disagreement(),time.time()-start))\n sys.stdout.flush()\n time_since_last_update = 0.0\n progress += 1\n\n print(\"\")\n end = time.time()\n return self.T[-1]\n\n def sim_delay(self, delay = 1, runtime=100):\n t=0\n self.tau=delay\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n for i in range(0,delay+1):\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n while self.agreement() == False:\n if (self.T[-1] > runtime):\n break\n if (t==0 and self.warn and not nx.is_connected(self.graph)):\n print(\"Graph not connected, consensus algorithm will probably not converge!\")\n 
print(\"Simulating to 5 seconds...\")\n flag = True\n if(flag and time.time()-start>5):\n break\n # core simulation done here\n # very simple discretisation...\n self.x = self.X[-1]\n if (len(self.X)-delay<0):\n pass\n else:\n index = len(self.X)-delay\n self.x = self.X[-1]+self.dt*self.f(self.X[index],*self.f_arg)\n # odd way to test for 1,2,3,etc\n # when arg is float\n self.X.append(self.x)\n self.T.append(time.time()-start)\n t = t+self.dt\n end = time.time()\n return self.T[-1]\n\n def plot(self, weight_average=False):\n \"\"\"Show the convergence analysis\"\"\"\n if(len(self.X)==0 or len(self.T)==0):\n print(\"Nothing to plot...\")\n x = np.array(self.X)\n for i in range(0,x.shape[1]):\n plt.plot(self.T,x[:,i,0])\n if(weight_average):\n w_i = np.zeros(self.size)\n s = sum(np.array(self.graph.degree)[:,1])\n x = self.x_init\n for i in nx.nodes(self.graph):\n w_i[i] = self.graph.degree(i)/s\n x[i] = x[i]*w_i[i]\n plt.plot(np.linspace(0,self.T[-1],10),np.zeros(10)+sum(x), label=\"Connected graph consensus: \"+str(sum(x)),color='red',marker='s')\n else:\n plt.plot(np.linspace(0,self.T[-1],10),np.zeros(10)+np.mean(self.x_init), label=\"Connected graph consensus: \"+str(round(np.mean(self.x_init),3)),color='red',marker='s')\n plt.grid()\n plt.xlabel(\"Time (seconds)\")\n plt.ylabel(\"State\")\n plt.title(\"Convergence of consensus algorithm\")\n plt.legend()\n\n def print_delay(self):\n print(\"Delay in seconds\")\n return self.dt*self.tau\n\n def delay_stable_max(self):\n d = maximum_degree(self.graph)\n return (np.pi)/(4*d[1])\n",
"step-ids": [
3,
8,
9,
11,
12
]
}
|
[
3,
8,
9,
11,
12
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Bob(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Bob(object):
<|reserved_special_token_0|>
def hey(self, question):
if not question or question.strip() == '':
return self.silent_response
if question.isupper():
return self.yell_response
elif question.endswith('?'):
return self.question_response
return self.whatever
<|reserved_special_token_1|>
class Bob(object):
def __init__(self):
self.question_response = 'Sure.'
self.yell_response = 'Woah, chill out!'
self.silent_response = 'Fine. Be that way!'
self.whatever = 'Whatever.'
def hey(self, question):
if not question or question.strip() == '':
return self.silent_response
if question.isupper():
return self.yell_response
elif question.endswith('?'):
return self.question_response
return self.whatever
<|reserved_special_token_1|>
#!/usr/bin/python
class Bob(object):
def __init__(self):
self.question_response = "Sure."
self.yell_response = "Woah, chill out!"
self.silent_response = "Fine. Be that way!"
self.whatever = "Whatever."
def hey(self, question):
if not(question) or question.strip()=='':
return self.silent_response
if question.isupper():
return self.yell_response
elif question.endswith("?"):
return self.question_response
return self.whatever
|
flexible
|
{
"blob_id": "7ff7da216bdda5c30bf7c973c82886035b31247c",
"index": 4093,
"step-1": "<mask token>\n",
"step-2": "class Bob(object):\n <mask token>\n <mask token>\n",
"step-3": "class Bob(object):\n <mask token>\n\n def hey(self, question):\n if not question or question.strip() == '':\n return self.silent_response\n if question.isupper():\n return self.yell_response\n elif question.endswith('?'):\n return self.question_response\n return self.whatever\n",
"step-4": "class Bob(object):\n\n def __init__(self):\n self.question_response = 'Sure.'\n self.yell_response = 'Woah, chill out!'\n self.silent_response = 'Fine. Be that way!'\n self.whatever = 'Whatever.'\n\n def hey(self, question):\n if not question or question.strip() == '':\n return self.silent_response\n if question.isupper():\n return self.yell_response\n elif question.endswith('?'):\n return self.question_response\n return self.whatever\n",
"step-5": "#!/usr/bin/python\n\nclass Bob(object):\n def __init__(self):\n self.question_response = \"Sure.\"\n self.yell_response = \"Woah, chill out!\"\n self.silent_response = \"Fine. Be that way!\"\n self.whatever = \"Whatever.\"\n\n def hey(self, question):\n if not(question) or question.strip()=='':\n return self.silent_response\n if question.isupper():\n return self.yell_response\n elif question.endswith(\"?\"):\n return self.question_response\n return self.whatever\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def dpd1_process(lst):
"""已废弃的方法"""
if not lst:
return
for key, l in lst.items():
rule = getattr(BeforeInBomber, key).value
query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)
).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(
rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.
call_success()))
success_set = {i.loanid for i in query}
failed_list = list(set(l) - success_set)
post_params = {'$and': rule.get('$and'), 'app_list': failed_list}
resp = Hyperloop().post('/bomber/score/verify', json=post_params)
if not resp.ok:
logging.error('hyperloop score verification failed: %s, %s',
str(resp.status_code), str(resp.text))
logging.error('hyperloop score verification failed: %s', str(
post_params))
continue
logging.debug('hyperloop score verification success: %s', resp.content)
resp_json = resp.json()
app_list = resp_json['data']
if not app_list:
continue
for item in app_list:
if random.randint(0, 5) == 1:
send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':
int(item)})
@action(MessageAction.GET_IVR)
def get_ivr(payload, msg_id):
logging.warning('start get_ivr')
sys_config = SystemConfig.select().where(SystemConfig.key ==
'DPD1-3_INTO_IVR').first()
now = date.today()
if sys_config and sys_config.value:
start = now - timedelta(days=3)
else:
start = now
end = now + timedelta(days=4)
item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()
if not item:
AutoIVR.delete().execute()
current_page = 0
elif item.current_page >= item.total_page:
return
else:
current_page = item.current_page
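    # IVR group lookup: keys are "<app_name><su><time>", where time is days
    # until due, or "PDPn" for n days overdue (e.g. "DanaCepat0PDP1").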
auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,
'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,
'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':
10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,
'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,
'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,
'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,
'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,
'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,
'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,
'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,
'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,
'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,
'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':
101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,
'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,
'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,
'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}
current_page += 1
with db.atomic() as transaction:
while True:
bill_service = BillService()
ivr_action = bill_service.ivr_pages(page=current_page,
page_size=500, start_time=utc_datetime(str(start)),
end_time=utc_datetime(str(end)))
result = ivr_action['result']
page_size = int(ivr_action.get('page_size', 0))
total_page = int(ivr_action.get('total_page', 0))
insert_args = []
for a in result:
due_at = no_utc_datetime(a['due_at'])
days = (due_at.date() - now).days
if days == 2:
continue
if days > 0:
time = str(days)
else:
time = str(days).replace('-', 'PDP')
key = a['app_name'] + str(a['su']) + time
group = auto_ivr.get(key)
user_id = a['user_id']
try:
user_resp = AccountService().get_user(path_params={
'user_id': user_id})
if str(user_resp['mobile_no']) == str(a['user_mobile_no']):
numbers = a['user_mobile_no']
else:
numbers = a['user_mobile_no'] + ',' + user_resp.get(
'mobile_no')
                except Exception:
logging.error('Request Account Service Error.')
numbers = a['user_mobile_no']
insert_args.append({'application_id': a['id'], 'numbers':
numbers, 'group': group, 'user_id': user_id})
AutoIVR.insert_many(insert_args).execute()
if current_page == 1:
IVRActionLog.create(total_page=total_page, proc_date=now,
page_size=page_size, current_page=current_page)
item = IVRActionLog.get(IVRActionLog.proc_date == now)
else:
item.current_page = current_page
item.page_size = page_size
item.total_page = total_page
item.save()
transaction.commit()
current_page += 1
if current_page > int(total_page):
break
if sys_config and sys_config.value:
try:
classfiy_dpd_ptp_apps()
except Exception as e:
logging.error('dpd1-3_test_error:%s' % str(e))
def ivr_t2_test():
t2_groups = [39, 40, 41, 42, 43, 44]
ivr_test_proportion = 0.2
sys_config = SystemConfig.select().where(SystemConfig.key ==
'IVR_TEST_PROPORTION').first()
if sys_config and sys_config.value:
ivr_test_proportion = float(sys_config.value)
t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.
status == AutoIVRStatus.AVAILABLE.value)
t2_dict = defaultdict(list)
for ivr in t2_ivrs:
t2_dict[ivr.group].append(ivr.id)
test_ivr_ids = []
for group, ivr_ids in t2_dict.items():
number = ceil(len(ivr_ids) * ivr_test_proportion)
test_ivr_ids += ivr_ids[:number]
if not test_ivr_ids:
return
q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.
group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()
<|reserved_special_token_0|>
@action(MessageAction.APP_MERGE)
@deprecated(version='1.0', reason='This function will be removed soon')
def app_merge(payload, msg_id):
sql = """
select *
from (
select a.id as id
from dashboard.application as a
inner join repayment.bill2 as b on b.external_id = a.id
where not exists (
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.app = 'DanaCepat'
and a.is_first_loan = 1
and a.apply_at < '2018-08-23 20:50:00'
and b.overdue_days between 1 and 3
and b.status != 2) result
where not exists (
select 1
from bomber.application as a
where a.cycle = 1
and a.status = 4
and a.id = result.id
)
"""
cursor = readonly_db.get_cursor()
cursor.execute(sql)
new_data = cursor.fetchall()
cursor.close()
if new_data:
bomber = [103, 104]
for d in new_data:
app_id = {'id': d[0]}
application_overdue(app_id, None)
Application.update(status=ApplicationStatus.AB_TEST.value,
latest_bomber=random.choice(bomber), ptp_bomber=None).where(
Application.id == d[0]).execute()
logging.warning('add new app success')
ptp = date.today() - timedelta(days=1)
del_sql = (
"""
select a.id
from bomber.application as a
where exists(
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.cycle = 1
and a.status = 4
and (a.promised_date is null or a.promised_date < "%s")
"""
% ptp)
cursor = readonly_db.get_cursor()
cursor.execute(del_sql)
del_date = cursor.fetchall()
cursor.close()
    if not del_date:
        return
ids = list()
for d in del_date:
ids.append(d[0])
Application.update(status=ApplicationStatus.UNCLAIMED.value,
latest_bomber=None).where(Application.id << ids).execute()
@action(MessageAction.APPLICATION_BOMBER)
def application_overdue(payload, msg_id):
application_id = payload['id']
sub_bill_id = payload['bill_sub_id']
local_app = Application.select().where(Application.external_id ==
application_id).order_by(Application.finished_at).first()
if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:
logging.info('application %s overdue, already exists', application_id)
add_contact(local_app)
return
if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:
overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==
sub_bill_id, OverdueBillR.external_id == application_id)
if overdue_bill.exists():
logging.info(
'application %s,sub_bill_id %s overdue, already exists' % (
application_id, sub_bill_id))
return
try:
sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
sub_bill = sub_bill[0]
except Exception:
logging.error(
'application %s overdue, get sub_bill info failed:Request To repayment Error'
, application_id)
return
if sub_bill['status'] == 2:
logging.error('application %s overdue, but bills already cleared',
application_id)
return
overdue_days = sub_bill.get('overdue_days', 0)
if overdue_days == 0:
logging.info('application {} no overdue'.format(str(application_id)))
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
logging.error('get application %s failed: Request to GoldenEye.',
application_id)
return
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
apply_history = Dashboard().get('/users/%s/apply-history' % user_id)
if not apply_history.ok:
logging.error(
'get user %s apply history failed: Request to Dashboard Failed.',
user_id)
return
history = apply_history.json().get('data')
loan_success_times = len([(1) for i in history if i['status'] in [80,
90, 100, 70] and i['id'] != gold_app['id']])
id = application_id
type = ApplicationType.CASH_LOAN.value
bill_id = sub_bill.get('bill_id')
amount = sub_bill.get('amount')
amount_net = sub_bill.get('amount_net')
interest_rate = sub_bill.get('interest_rate')
overdue_days = sub_bill.get('overdue_days')
origin_due_at = sub_bill.get('origin_due_at')
sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,
'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),
'overdue_days': overdue_days, 'origin_due_at': origin_due_at,
'amount': amount, 'amount_net': amount_net, 'interest_rate':
interest_rate, 'external_id': application_id}
if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:
if local_app and local_app.status != ApplicationStatus.REPAID.value:
sub_overdue_bill['collection_id'] = local_app.id
local_app.amount += amount
local_app.amount_net += amount_net
local_app.save()
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('application %s,sub_bill_id:%s overdue created' %
(application_id, sub_bill_id))
return
else:
id = idg()
type = ApplicationType.CASH_LOAN_STAGING.value
sub_overdue_bill['collection_id'] = id
ptp_info = BombingHistory.filter(BombingHistory.application == id).first()
promised_amount = ptp_info and ptp_info.promised_amount
promised_date = ptp_info and ptp_info.promised_date
application = Application.create(id=id, user_id=gold_app['user_id'],
user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[
'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],
contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(
'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=
birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(
gold_app.get('id_ektp')), profile_province=(gold_app.get(
'profile_province') or {}).get('name'), profile_city=(gold_app.get(
'profile_city') or {}).get('name'), profile_district=(gold_app.get(
'profile_district') or {}).get('name'), profile_residence_time=
gold_app.get('profile_residence_time'), profile_residence_type=
gold_app.get('profile_residence_type'), profile_address=gold_app.
get('profile_address'), profile_education=gold_app.get(
'profile_education'), profile_college=(gold_app.get(
'profile_college') or {}).get('name'), job_name=gold_app.get(
'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get
('job_bpjs'), job_user_email=gold_app.get('job_user_email'),
job_type=gold_app.get('job_type'), job_industry=gold_app.get(
'job_industry'), job_department=gold_app.get('job_department'),
job_province=(gold_app.get('job_province') or {}).get('name'),
job_city=(gold_app.get('job_city') or {}).get('name'), job_district
=(gold_app.get('job_district') or {}).get('name'), job_address=
gold_app.get('job_address'), amount=amount, amount_net=amount_net,
interest_rate=interest_rate, term=gold_app.get('term'),
origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=
sub_bill.get('repay_at'), loan_success_times=loan_success_times,
arrived_at=datetime.now(), follow_up_date=datetime.now(),
promised_amount=promised_amount, promised_date=promised_date,
external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=
datetime.now())
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('overdue application %s created', application_id)
Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)
add_contact(application)
<|reserved_special_token_0|>
def get_contact_from_mongo(number):
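    # Look up contacts for a number in the TotalContact store, skipping
    # entries whose relationship or source cannot be resolved.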
if not number:
return []
query = TotalContact.objects(src_number=number, source__in=TotalContact
.available()).order_by('source')
lst = []
for c in query:
relation = TotalContact.relationship(c.source)
if relation == -1:
continue
source = TotalContact.str_source(c.source)
if not source:
continue
lst.append({'related_number': c.dest_number, 'source': source,
'is_calc': c.is_calc, 'total_count': c.total_count,
'total_duration': c.total_duration, 'relation': relation,
'name': c.dest_name})
return lst
<|reserved_special_token_0|>
def check_key_not_none(payload, keys):
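    # Guard helper for action payloads, e.g. the keys consumed by
    # application_overdue: check_key_not_none(payload, ['id', 'bill_sub_id'])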
for key in keys:
if payload.get(key) is None:
logging.error('Missing args {}'.format(str(key)))
return False
return True
<|reserved_special_token_0|>
@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
"""已废弃"""
bill = payload['head_bill']
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_row = Application.update(repay_at=repay_at).where(Application.
id == bill['external_id']).execute()
logging.info('application %s bill relief done', bill['external_id'])
return updated_row
<|reserved_special_token_0|>
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
"""
    Called by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query = Application.update(overdue_days=overdue_days).where(Application
.status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.
overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)
updated_rows_count = query.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count)
try:
calc_overdue_days_over_instalment()
except Exception as e:
logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))
apps = Application.filter(Application.status << [ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value], Application.overdue_days > 95,
Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
<|reserved_special_token_0|>
def calc_overdue_days_instalment():
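    # Refresh overdue_days on each open sub-bill, then roll the maximum of a
    # staged application's sub-bills up to the application itself.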
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
today_now_time = datetime.now()
month_first_day = today_now_time.replace(day=1, hour=1, minute=30,
second=0, microsecond=0)
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days <= 95)
updated_rows_count = query.execute()
logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %
(updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.
created_at, OverdueBill.collection_id, OverdueBill.overdue_days
).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.
collection_id == Application.id).where(Application.status ==
status, Application.type == ApplicationType.CASH_LOAN_STAGING.value
)
app_update = {}
for ob in overdue_bills:
if (ob.status == ApplicationStatus.REPAID.value and ob.
created_at < month_first_day):
continue
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)
def automatic_escalation(payload, msg_id):
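    # Promote each app to its recomputed cycle: close the old dispatch record,
    # log an Escalation, and reset claim/PTP/call state for re-dispatch.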
app_ids = payload.get('application_list', [])
if not app_ids:
return
apps = Application.select().where(Application.id.in_(app_ids),
Application.status != ApplicationStatus.REPAID.value)
for a in apps:
new_cycle = application_entry_different_calculations(a)
if a.overdue_days < 90:
logging.info(
'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'
.format(a.id, new_cycle, a.cycle, a.overdue_days))
if new_cycle > a.cycle:
with db.atomic():
if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle
.C1B.value):
bomber_id = (a.latest_bomber_id if a.latest_bomber_id else
a.cycle)
DispatchAppHistory.update(out_at=datetime.now(),
out_overdue_days=a.overdue_days).where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == bomber_id).execute()
Escalation.create(application=a.id, type=EscalationType.
AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,
current_cycle=a.cycle, escalate_to=new_cycle,
current_bomber_id=a.latest_bomber)
dis_app_update = DispatchApp.update(status=DisAppStatus.
ABNORMAL.value).where(DispatchApp.application == a.id)
dis_app_update.execute()
a.cycle = new_cycle
a.last_bomber = a.latest_bomber
a.status = ApplicationStatus.UNCLAIMED.value
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
if new_cycle == Cycle.C1B.value:
a.C1B_entry = datetime.now()
elif new_cycle == Cycle.C2.value:
a.C2_entry = datetime.now()
elif new_cycle == Cycle.C3.value:
a.C3_entry = datetime.now()
a.save()
logging.info('automatic escalation done')
<|reserved_special_token_0|>
@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
"""已废弃"""
employees = Bomber.select(Bomber, Role).join(Role)
summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,
'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,
'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':
0, 'sms_sent': 0} for i in employees}
now_date = date.today()
cal_date = now_date - timedelta(days=1)
claimed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('claimed')).where(fn.DATE(Application.
claimed_at) == cal_date, Application.status << [ApplicationStatus.
PROCESSING.value, ApplicationStatus.REPAID.value], Application.
latest_bomber.is_null(False)).group_by(Application.latest_bomber)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False)).group_by(
Application.latest_bomber)
completed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('completed')).where(Application.
latest_bombing_time.is_null(False), fn.DATE(Application.
latest_bombing_time) == cal_date, Application.latest_bomber.is_null
(False)).group_by(Application.latest_bomber)
escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(
Escalation.id).alias('escalated')).where(fn.DATE(Escalation.
created_at) == cal_date, Escalation.type == EscalationType.
AUTOMATIC.value, Escalation.current_bomber.is_null(False),
Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation
.current_bomber)
transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).
alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==
cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(
Transfer.operator)
promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(
BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.
created_at) == cal_date, BombingHistory.result == BombingResult.
HAS_PROGRESS.value).group_by(BombingHistory.bomber)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,
RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.
current_bomber)
calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id
).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==
cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)
calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.
record_id).alias('calls_connected')).where(fn.DATE(CallLog.
time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==
'1').group_by(CallLog.user_id)
sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(
ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_
(ConnectType.sms()), ConnectHistory.created_at >= cal_date,
ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)
for i in claimed:
summary[i.latest_bomber_id]['claimed'] += i.claimed
for i in completed:
summary[i.latest_bomber_id]['completed'] += i.completed
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
for i in escalated:
summary[i.current_bomber_id]['escalated'] += i.escalated
for i in transferred:
summary[i.operator_id]['transferred'] += i.transferred
for i in promised:
summary[i.bomber_id]['promised'] += i.promised
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in calls_made:
summary[int(i.user_id)]['calls_made'] += i.calls_made
for i in calls_connected:
summary[int(i.user_id)]['calls_connected'] += i.calls_connected
for i in sms_sent:
summary[i.operator_id]['sms_sent'] += i.sms_sent
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'claimed': data['claimed'], 'completed': data['completed'],
'cleared': data['cleared'], 'escalated': data['escalated'],
'transferred': data['transferred'], 'promised': data['promised'
], 'amount_recovered': data['amount_recovered'], 'calls_made':
data['calls_made'], 'calls_connected': data['calls_connected'],
'sms_sent': data['sms_sent'], 'date': cal_date})
if insert_args:
Summary.insert_many(insert_args).execute()
cycle_args = []
escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(
Escalation.id).alias('escalated_in')).where(Escalation.status ==
ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==
cal_date).group_by(Escalation.escalate_to)
for i in escalated_in:
cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.
escalated_in, 'date': cal_date})
amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM
(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)
for i in amount_recovered_total:
amount_recovered_total = i.principal_part + i.late_fee_part
cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':
amount_recovered_total, 'date': cal_date})
if cycle_args:
Summary.insert_many(cycle_args).execute()
logging.info('cal summary done')
send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})
@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
"""已废弃,定时任务还在执行,具体情况待确定"""
cal_date = date.today() - timedelta(days=1)
employees = Bomber.select(Bomber, Role).join(Role)
auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,
AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')
).where(fn.DATE(AutoCallActions.created_at) == cal_date)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.
is_null(False), RepaymentLog.is_bombed == True)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False))
auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,
AutoCallActions.result)
amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)
cleared = cleared.group_by(Application.latest_bomber)
summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,
'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered':
0} for e in employees}
for a in auto_call_actions:
summary[a.bomber_id]['answered_calls'] += a.count
if a.result == AutoCallResult.PTP.value:
summary[a.bomber_id]['ptp'] += a.count
if a.result == AutoCallResult.FOLLOW_UP.value:
summary[a.bomber_id]['follow_up'] += a.count
if a.result == AutoCallResult.NOT_USEFUL.value:
summary[a.bomber_id]['not_useful'] += a.count
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'answered_calls': data['answered_calls'], 'ptp': data['ptp'],
'follow_up': data['follow_up'], 'not_useful': data['not_useful'
], 'cleared': data['cleared'], 'amount_recovered': str(data[
'amount_recovered']), 'date': cal_date})
if insert_args:
Summary2.insert_many(insert_args).execute()
<|reserved_special_token_0|>
@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
app_id = payload['id']
msg_type = payload['msg_type']
discount_to = payload['discount_to']
effective_to = payload['effective_to']
application = Application.filter(Application.id == app_id).first()
if not application:
logging.error(
'discount approved msg send failed application %s not found',
app_id)
return
template = Template.select(Template.text, Template.app).where(Template.
type == ConnectType.AUTO_SMS.value, Template.id << Template.
get_auto_sms_tpl(msg_type), Template.app == application.app).first()
if not template:
logging.error('discount approved msg send failed template %s not found'
, msg_type)
return
promised_date = None
if application.promised_date:
promised_date = application.promised_date.strftime('%d-%m-%Y')
tpl_data = {'user_name': application.user_name, 'due_days': application
.overdue_days, 'app_name': application.app, 'phone': application.
user_mobile_no, 'cs_number': cs_number_conf.get(application.app,
'02150202889'), 'promised_date': promised_date, 'discount_to':
discount_to, 'effective_to': effective_to}
content = template.text.format(**tpl_data)
data_list = [{'receiver': '62' + application.user_mobile_no, 'content':
content, 'title': ''}]
send_sms(data_list, msg_type, application.app)
<|reserved_special_token_0|>
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):
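    # Rebuild the C1A auto-call list from scratch (apps already dispatched to
    # partners are excluded), then fan the list out in batches of 100.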
with db.atomic():
bomber_dispatch_app()
dispatch_instalment_app()
dis_apps = DispatchApp.select(DispatchApp.application).where(
DispatchApp.status == DisAppStatus.NORMAL.value)
c1_apps = Application.select(Application.id, Application.cycle,
Application.follow_up_date, Application.called_times).where(Application
.status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.
AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.
is_rejected == False, Application.promised_date.is_null(True) | (fn
.DATE(Application.promised_date) < datetime.today().date())).order_by(
Application.overdue_days, Application.apply_at)
dis_apps_ids = [da.application_id for da in dis_apps]
insert_args = []
for a in c1_apps:
if a.id in dis_apps_ids:
continue
insert_args.append({'application': a.id, 'cycle': a.cycle,
'follow_up_date': a.follow_up_date, 'called_times': 1 if a.
called_times else 0, 'description': 'init'})
if not insert_args:
        logging.error('no application needs auto call')
with db.atomic():
AutoCallList.delete().execute()
for idx in range(0, len(insert_args), 100):
AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()
for idx in range(0, len(insert_args), 100):
application_list = [i['application'] for i in insert_args[idx:idx +
100]]
send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {
'application_list': application_list})
logging.info('bomber generate auto call list finished')
send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})
class ChangeBomberTool(object):
@staticmethod
def in_record(bomber_id, ids, bd):
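        # Snapshot the applications into DispatchAppHistory with a 14-day
        # expected-out window and reassign them to the new bomber.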
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.
alias('application_id'), R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(
'expected_out_time'), Application.overdue_days.alias(
'entry_overdue_days')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << ids)
Application.update(latest_bomber=bomber_id).where(Application.id.
in_(ids)).execute()
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bd),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@staticmethod
def out_record(a, bd):
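        # Close the open dispatch record with the still-pending principal and
        # late fee, then detach the application from its bomber.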
_id = str(a.id)
DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a
.overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]
.get('principal_paid')), out_late_fee_pending=bd[_id].get(
'late_fee') - bd[_id].get('late_fee_paid')).where(
DispatchAppHistory.application == a.id, DispatchAppHistory.
bomber_id == a.latest_bomber_id).execute()
a.last_bomber = a.latest_bomber
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
a.save()
@staticmethod
def classify(l, b):
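        # Prefer the least-loaded candidate that is not bomber `b`; a single
        # candidate leaves no choice.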
if len(l) == 1:
return l[0]
_l = filter(lambda x: x['bomber'] != b, l)
return min(_l, key=lambda x: len(x['ids']))
<|reserved_special_token_0|>
def bomber_dispatch_app():
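    # Daily C2 dispatch: hand today's C2 entries to partners by app_percentage,
    # then route the remainder to the AB-test bomber pool.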
try:
c1a_dispatch_app()
except Exception as e:
logging.error('c1a_dispatch_app error:%s' % str(e))
    cycle = {1: 10, 2: 30, 3: 60, 4: 90}
apps = Application.select().where(fn.DATE(Application.C2_entry) == date
.today(), Application.type == ApplicationType.CASH_LOAN.value)
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C2.value)
apps_ids = [a.id for a in apps]
dispatch_inserts = []
start_index = 0
apps_length = len(apps_ids)
logging.warning('apps length %s' % str(apps_length))
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)
gen = CycleIter([b.id for b in bombers])
existing_list = []
end_index = start_index + int(apps_length * p.app_percentage)
logging.info('partner length %s' % str(end_index))
if not apps_ids[start_index:end_index]:
continue
bills = BillService().bill_list(application_ids=apps_ids[
start_index:end_index])
bill_dict = {bill['application_id']: bill for bill in bills}
for a_id in apps_ids[start_index:end_index]:
bomber = average_gen(gen, existing_list)
q = DispatchApp.delete().where(DispatchApp.application == a_id
).execute()
dispatch_inserts.append({'application': a_id, 'bomber': bomber,
'partner': p.id})
application = Application.select().where(Application.id == a_id
).first()
application.latest_bomber = bomber
application.status = ApplicationStatus.AB_TEST.value
application.ptp_bomber = None
application.save()
day_next_cycle = cycle.get(application.cycle
) - application.overdue_days
DispatchAppHistory.create(application=a_id, partner_id=p.id,
bomber_id=bomber, entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - Decimal(
bill_dict[a_id].get('principal_paid')),
entry_late_fee_pending=Decimal(bill_dict[a_id].get(
'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')
), expected_out_time=date.today() + timedelta(days=
day_next_cycle))
start_index = end_index
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
config = SystemConfig.prefetch(SCI.AB_TEST_C2)
c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)
c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)
c2 = apps_ids[start_index:]
if c2:
bills = BillService().bill_list(application_ids=c2)
else:
bills = []
bill_dict = {bill['application_id']: bill for bill in bills}
logging.info('c2 AB_test length: %s' % str(c2))
gen = CycleIter(c2_bomber)
existing_list = []
for c in c2:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == c).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = 46 - application.overdue_days
DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at
=datetime.now(), entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - bill_dict[c].get(
'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(
'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),
expected_out_time=date.today() + timedelta(days=day_next_cycle))
ab_test_other()
def c1a_dispatch_app():
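    # Split today's new DPD1 cash-loan apps across C1A partners by
    # app_percentage, then evenly across each partner's bombers.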
today = datetime.today().date()
tomorrow = today + timedelta(days=1)
c1a_apps = Application.select().where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value], Application.dpd1_entry >= today, Application.dpd1_entry <
tomorrow, Application.type == ApplicationType.CASH_LOAN.value)
all_aids = [a.id for a in c1a_apps]
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C1A.value)
end = 0
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
is_del == 0)
start = end
end += int(len(all_aids) * p.app_percentage)
aids = all_aids[start:end]
bids = [b.id for b in bombers]
if not bids or not aids:
continue
average_number = get_average_number(len(aids), len(bids))
p_end = 0
for i, bid in enumerate(bids):
p_start = p_end
p_end += average_number[i]
b_aids = aids[p_start:p_end]
with db.atomic():
q = Application.update(latest_bomber=bid, status=
ApplicationStatus.AB_TEST.value).where(Application.id <<
b_aids).execute()
params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,
'application_ids': b_aids, 'dest_bomber_id': bid}
new_in_record(**params)
try:
dispatch_inserts = []
for aid in b_aids:
dispatch_inserts.append({'application': aid, 'bomber':
bid, 'partner': p.id, 'status': DisAppStatus.NORMAL
.value})
if dispatch_inserts:
q = DispatchApp.insert_many(dispatch_inserts).execute()
except Exception as e:
                logging.error('c1a dispatch_app insert error:%s' % str(e))
<|reserved_special_token_0|>
def get_cash_bomber(bids, cycle):
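    # Keep only active bombers that are not assigned instalment work for this
    # cycle.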
cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==
0, Bomber.instalment != cycle)
cash_bids = [b.id for b in cash_bombers]
return cash_bids
<|reserved_special_token_0|>
def out_and_in_record_instalment(**kwargs):
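    # Close any open dispatch records for these instalment apps, then open new
    # ones under the target bomber using amounts from their sub-bills.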
if not kwargs.get('application_ids'):
return
out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(
DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True)).execute()
    cycle_period = {1: '10', 2: '30', 3: '60', 4: '90'}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(kwargs['dest_bomber_id'])).alias(
'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.
alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).
alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << kwargs[
'application_ids'])
application_list = list(subquery)
for idx in range(0, len(application_list), 50):
applications = application_list[idx:idx + 50]
app_ids = [i.application_id for i in applications]
overdue_bills = OverdueBill.select().where(OverdueBill.
collection_id << app_ids)
sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
insert_args = lambad_instalment_result(bill_list, applications)
if not insert_args:
continue
DispatchAppHistory.insert_many(insert_args).execute()
<|reserved_special_token_0|>
def c1b_dispatch_in_record(**kwargs):
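    # Record C1B dispatch entries; `period` sets the expected-out date relative
    # to each application's overdue days.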
app_ids = kwargs.get('apps')
partner_id = kwargs.get('partner_id', 'null')
bill_dict = kwargs.get('bill_dict')
period = kwargs.get('period')
bomber_id = kwargs.get('bomber_id')
if not all([app_ids, partner_id, bill_dict, period]):
return False
bill_dict = {str(k): v for k, v in bill_dict.items()}
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().
alias('entry_at'), Application.overdue_days.alias(
'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.id << app_ids)
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
<|reserved_special_token_0|>
def check_call_history(application):
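    # Apps older than four days must have had a connected call within the last
    # five days to pass this check.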
app_create_at = application.created_at + timedelta(days=4)
if datetime.today().date() > app_create_at.date():
call_actions = CallActions.select().where(CallActions.type == 0,
CallActions.application == application.id, CallActions.
created_at > datetime.now() - timedelta(days=5))
for call in call_actions:
if call.phone_status == PhoneStatus.CONNECTED.value:
return True
return False
return True
@action(MessageAction.BOMBER_SCAVENGER)
def scavenger(payload, msg_id):
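    # Reclaim auto-call entries stuck in PROCESSING (default 60 min) or
    # MAILBOX (default 30 min), and stale processing IVR records (30 min).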
scavenger_time = -60
scavenger = SystemConfig.select().where(SystemConfig.key ==
'SCAVENGER_TIME').first()
if scavenger and scavenger.value.isdigit():
scavenger_time = -int(scavenger.value)
update_auto_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value, description='scavenger').where(AutoCallList.status ==
AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime
.now() + timedelta(minutes=scavenger_time))
count = update_auto_call_list.execute()
logging.info('scavenger processed %s application', count)
mail_box_scavenger_time = -30
mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==
'MAIL_BOX_SCAVENGER_TIME').first()
if mail_box_scavenger and mail_box_scavenger.value.isdigit():
mail_box_scavenger_time = -int(mail_box_scavenger.value)
update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.
value, AutoCallList.updated_at < datetime.now() + timedelta(minutes
=mail_box_scavenger_time))
mail_box_count = update_mail_box_call_list.execute()
logging.info('scavenger update mail box %s', mail_box_count)
update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value
).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.
updated_at < datetime.now() + timedelta(minutes=-30))
ivr_result = update_auto_ivr.execute()
logging.info('scavenger update %s ivr' % ivr_result)
<|reserved_special_token_0|>
@action(MessageAction.REPORT_BOMBER_COLLECTION)
def report_bomber_collection(payload, msg_id):
start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)
).scalar()
now = datetime.now()
if start_date and str(start_date) == str(now)[:10]:
return
end_date = str(now + timedelta(days=1))[:10]
start_date = str(now)[:10]
dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))
all_overdue_loan_sql1 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
GROUP BY 1;
"""
s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()
d1 = OperatedDict(s_data1)
all_overdue_loan_sql2 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE DATE(ba.follow_up_date) > CURDATE()
AND ba.called_counts = 0
GROUP BY 1;
"""
s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()
d2 = OperatedDict(s_data2)
overdue_loans_entered_into_predict_call_system_sql = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE ba.called_counts >= 1
GROUP BY 1;
"""
s_data3 = readonly_db.execute_sql(
overdue_loans_entered_into_predict_call_system_sql).fetchall()
d3 = OperatedDict(s_data3)
loans_completed_sql = """
SELECT ba.cycle, COUNT(DISTINCT ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()
d4 = OperatedDict(s_data4)
connected_calls_automatic_sql = """
SELECT ba.cycle, COUNT(ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()
d5 = OperatedDict(s_data5)
connected_calls_manual_sql = """
SELECT bb.cycle, COUNT(bb.id)
FROM bomber.bombing_history bb
WHERE DATE(bb.created_at) = curdate()
AND (bb.bomber_id < 150 OR bb.bomber_id > 200)
GROUP BY bb.cycle;
"""
s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()
d6 = OperatedDict(s_data6)
logging.info('Directly get data from database successfully.')
c1 = d1 - d2
c2 = d3
c3 = c2 / c1
c4 = d4
c5 = c4 / c2
c6 = d5
c7 = c6 / c4
c8 = d6
c9 = OperatedDict(get_agent())
c10 = (c6 + c8) / c9
try:
c11 = average_call_duration_team(start_date, end_date)
except AttributeError:
c11 = {}
lst = []
for i in range(1, 5):
lst.append({'apply_date': start_date, 'cycle': dct[i],
'all_overdue_loan': c1.get(i, 0),
'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),
'of_overdue_loans_entered_into_predict_call_system': round(c3.
get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),
'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *
100, 1), 'connected_calls_automatic': c6.get(i, 0),
'connected_calls_automatic_completed_loans': round(c7.get(i, 0),
1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i,
0), 'average_calls_agent': round(c10.get(i, 0), 1),
'average_call_duration_team': round(c11.get(i, 0), 1)})
ReportCollection.insert_many(lst).execute()
logging.info('report_bomber_collection:Done!')
<|reserved_special_token_0|>
def new_out_record(**kwargs):
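    # Close the source bomber's open dispatch records; monthly dispatch keeps
    # ptp_bomber, otherwise it is cleared.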
if not kwargs['application_ids']:
return
DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.
bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.
application << kwargs['application_ids'], DispatchAppHistory.out_at
.is_null(True)).execute()
if kwargs.get('month_dispatch'):
return
try:
Application.update(ptp_bomber=None).where(Application.id << kwargs[
'application_ids']).execute()
except Exception as e:
logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[
'application_ids'], str(e)))
<|reserved_special_token_0|>
def end_old_application(old_app, paid=False):
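    # Advance an old-loan record: PAID on repayment, FINISHED once both the end
    # date and any promised date have passed; the returned application id
    # signals that follow-up cleanup is needed.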
if paid:
if old_app.status == OldLoanStatus.WAITING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return old_app.application_id
end_date = old_app.end_date
now = datetime.now()
if now >= max(end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
old_app.save()
return old_app.application_id
<|reserved_special_token_0|>
def get_before_bomber(date_time):
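    # DPD1-3 recovery over a 7-day window: money entering the bucket (existing
    # DPD2-3 plus new DPD1) minus what is still pending afterwards (DPD4 and
    # DPD2-3).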
begin_time = str(date_time - timedelta(days=7))
end_time = str(date_time)
old_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at<date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (begin_time, begin_time))
old_data = run_one_sql(old_sql)
new_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at> '%s'
and created_at<'%s'
and overdue_days=1;
"""
% (begin_time, end_time))
new_data = run_one_sql(new_sql)
dpd4_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>date_add('%s',interval 1 day)
and created_at< date_add('%s',interval 1 day)
and overdue_days=4;
"""
% (begin_time, end_time))
dpd4_data = run_one_sql(dpd4_sql)
dpd2_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at< date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (end_time, end_time))
dpd2_data = run_one_sql(dpd2_sql)
all_money = old_data + new_data
repayment = all_money - dpd4_data - dpd2_data
pro = 0
if all_money:
pro = repayment / all_money * 100
RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,
proportion=pro, repayment=repayment)
def get_c1a_into_rate(date_time):
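"""Daily C1A entry report, split by first-loan flag: pending amount on
the entry day plus later cycle-1 repayments, written to
RepaymentReportInto."""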
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=19)
repayment_sql = (
"""
select
sum(b.principal_part+b.late_fee_part) as paid_amount,
cdt
from
(select
br.principal_part, br.late_fee_part,
date(cdt) as cdt, br.repay_at, br.application_id
from (
select ba.id, ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
if not repayment:
return
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
for d in repayment:
repay = d[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
d[1], RepaymentReportInto.cycle == Cycle.C1A.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = round(repay, 3)
pro = repay / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_c2_into_rate(date_time):
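"""Daily C2 entry report, split by first-loan flag, computed both
including and excluding apps ever dispatched to partner 1."""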
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
not_contain_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=37)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
not_contain_repay = run_all_sql(not_contain_repay_sql)
if not (not_contain_money and repayment):
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(
not_contain_money, 3), proportion='0', repayment=0,
is_first_loan=is_first_loan, contain_out=ContainOut.
NOT_CONTAIN.value)
for repay in not_contain_repay:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(all_money, 3),
proportion='0', repayment=0, is_first_loan=
is_first_loan, contain_out=ContainOut.CONTAIN.value)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_c3_into_rate(date_time):
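"""Daily C3 entry report, split by first-loan flag; repayments are
cycle-4 logs grouped back to the entry date."""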
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id, ba.c3_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=30)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c3_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c3_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = '%s'
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 4
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C3.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
if not repayment:
return
for repay in repayment:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
repay[1], RepaymentReportInto.cycle == Cycle.C3.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = repay_money
pro = 0
if report.all_money and int(report.all_money):
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_static_bomber(begin_date):
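"""Seed baseline SummaryBomber rows (work_ind=0) for collection-role
bombers active within the 8 days before begin_date."""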
active_date = begin_date - timedelta(days=8)
bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),
BomberR.last_active_at.alias('active')).where(BomberR.
last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])
summary = []
for bomber in bombers:
summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':
bomber.role.cycle, 'work_ind': 0})
SummaryBomber.insert_many(summary).execute()
@time_logger
def get_call_and_made(end_date, begin_date, real_time_query=False):
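"""Per-bomber call stats for the window: dials and distinct cases with
a known relationship, plus connected (phone_status=4) dials and cases.
Updates SummaryBomber unless real_time_query is set."""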
call_sql = (
"""
select
bomber_id,
count(case when relationship is not null then application_id end)
as 'call_cnt',
count(distinct case when relationship is not null then
application_id end) as 'call_case',
count(case when phone_status=4 then application_id end) as 'connect',
count(distinct case when phone_status=4 then application_id end)
as 'connect_case'
from (
select bomber_id,application_id,phone_status, cycle, relationship
from bomber.call_actions ba
where created_at>'%s' and created_at<'%s'
and type in (0, 1)
) a
group by 1
"""
% (begin_date, end_date))
calls = run_all_sql(call_sql)
if real_time_query:
return calls
for call in calls:
bomber, call_cnt, case_made, connect_cnt, case_connect = call
SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,
call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(
SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date
).execute()
return calls
@time_logger
def get_claimed_cnt(end_date, begin_date, real_time_query=False):
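"""Per-bomber count of in-house cases entered in the window (30-day
look-back) and not checked out before begin_date."""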
table_date = begin_date - timedelta(days=30)
claimed_sql = (
"""
SELECT
COUNT( `t1`.`application_id` ) AS cnt,
`t1`.`bomber_id` AS bomber_id
FROM
`dispatch_app_history` AS t1
WHERE
( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null )
AND ( `t1`.`bomber_id` != 1000 )
AND ( `t1`.`partner_id` IS null )
AND ( `t1`.`entry_at` > '%s' )
AND ( `t1`.`entry_at` < '%s' )
GROUP BY
`t1`.`bomber_id`
"""
% (begin_date, table_date, end_date))
claimeds = run_all_sql(claimed_sql)
if real_time_query:
return claimeds
for claimed in claimeds:
cnt, bomber_id = claimed
SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == bomber_id).execute()
return claimeds
def get_sms_data(end_data, begin_data):
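"""Per-operator SMS counts for the window, written to
SummaryBomber.sms_cnt."""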
all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(
'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')
).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.
created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())
).group_by(ConnectHistoryR.operator)
for sms in all_sms:
SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==
begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()
return all_sms
@action(MessageAction.SUMMARY_NEW)
def summary_new(payload, msg_id):
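"""Daily job building yesterday's per-bomber activity metrics; the
WorkerLog guard stops it after five runs per day."""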
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_active_bomber(begin_date)
get_call_and_made(end_date, begin_date)
get_claimed_cnt(end_date, begin_date)
get_sms_data(end_date, begin_date)
get_ptp_data(end_date, begin_date)
get_recover_amount(end_date, begin_date)
get_unfollowed(begin_date)
get_unfollowed_call(begin_date)
def get_new_case_amount(begin_date, end_date):
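"""Per-bomber count and entry pending amount (principal plus late fee)
of new in-house cases; updates SummaryBomber."""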
all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.
entry_late_fee_pending + DispatchAppHistoryR.
entry_principal_pending).alias('pending'), DispatchAppHistoryR.
bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')
).where(DispatchAppHistoryR.entry_at > begin_date,
DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.
partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)
for case in all_case:
SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt
=case.cnt).where(SummaryBomber.bomber_id == case.bomber_id,
SummaryBomber.time == begin_date).execute()
return all_case
@time_logger
def get_new_case_call(begin_date, end_date, real_query_time=False):
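"""Per-bomber count of newly dispatched cases that were called on
their entry day."""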
sql = (
"""
SELECT
bd.bomber_id,
count( DISTINCT bd.application_id )
FROM
bomber.dispatch_app_history bd
INNER JOIN bomber.call_actions bc
ON bd.application_id = bc.application_id
AND bd.bomber_id = bc.bomber_id
AND date( bd.entry_at ) = date( bc.created_at )
WHERE
entry_at > '%s'
AND entry_at < '%s'
AND partner_id IS NULL
GROUP BY 1
"""
% (begin_date, end_date))
new_case_calls = run_all_sql(sql)
if real_query_time:
return new_case_calls
for call in new_case_calls:
SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber
.bomber_id == call[0], SummaryBomber.time == begin_date).execute()
return new_case_calls
@time_logger
def get_calltime_avg(begin_date, end_date, real_query_time=False):
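"""Per-bomber answered talk time: auto-dial CDRs matched by username,
manual CDRs (ext 5x/3x with a recording) matched by extension; stores
sum, count, and average case call time."""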
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction,
count( 1 ) AS auto_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
AND an.recording is not null
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]][0] += data[1]
result[data[0]][1] += data[2]
continue
result[data[0]] = [data[1], data[2]]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=
value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == key).execute()
return result
def get_no_calltime_avg(begin_date, end_date):
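"""Per-bomber talk time of manual calls that were not answered or left
no recording."""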
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND (an.status!='ANSWERED' or an.recording is null)
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
for data in manuals:
SummaryBomber.update(calltime_no_case_sum=data[1],
calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /
data[2] if data[2] else 0).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == data[0]).execute()
@time_logger
def get_calltime_sum(begin_date, end_date, real_query_time=False):
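"""Per-bomber total talk time across auto (by username) and manual
(ext 5x/3x) CDRs."""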
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == key).execute()
return result
@action(MessageAction.UPDATE_SUMMARY_NEW)
def update_summary_new(payload, msg_id):
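"""Second daily pass filling amount, PTP, cleared, and call-time
columns on yesterday's SummaryBomber rows; capped at five runs via
WorkerLog."""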
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'UPDATE_SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_new_case_amount(begin_date, end_date)
get_kp_cleared(begin_date, end_date)
get_kp_today(begin_date, end_date)
get_ptp_cnt(begin_date, end_date)
get_ptp_call_cnt(begin_date, end_date)
get_new_case_cleared(begin_date, end_date)
get_new_case_call(begin_date, end_date)
get_calltime_avg(begin_date, end_date)
get_no_calltime_avg(begin_date, end_date)
get_calltime_sum(begin_date, end_date)
def get_cycle_claimed(begin_date, end_date):
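"""Per-cycle count of applications created since 2018-09-01 that were
not finished before begin_date."""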
sql = (
"""
select cycle,count(1)
from bomber.application where cycle in (1,2,3,4)
and (finished_at is null or (finished_at>'%s'))
and created_at>'2018-09-01'
group by 1
"""
% begin_date)
result = run_all_sql(sql)
return result
@time_logger
def cycle_new_case(begin_date, end_date, real_time_query=False):
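"""Per-cycle count and pending amount of cases entering cycles 1-4 in
the window; the cycle-level SummaryBomber rows reuse the cycle number
as bomber_id."""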
sql = (
"""
SELECT
1 AS cycle,
count( ba.id ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.created_at ) = bo.which_day_overdue
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c1b_entry ) = bo.which_day_overdue
WHERE
c1b_entry > '%s'
AND c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c2_entry ) = bo.which_day_overdue
WHERE
c2_entry > '%s'
AND c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c3_entry ) = bo.which_day_overdue
WHERE
c3_entry > '%s'
AND c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
all_datas = run_all_sql(sql)
if real_time_query:
return all_datas
for data in all_datas:
SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()
return all_datas
@time_logger
def get_cycle_new_case_call(begin_date, end_date, real_time_query=False):
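"""Per-cycle count of entering cases called on their entry day;
updates the cycle-level SummaryBomber rows (bomber_id == cycle)."""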
sql = (
"""
SELECT
1 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.created_at ) = date( bc.created_at )
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c1b_entry ) = date( bc.created_at )
WHERE
ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c2_entry ) = date( bc.created_at )
WHERE
ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c3_entry ) = date( bc.created_at )
WHERE
ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
cycle_datas = run_all_sql(sql)
if real_time_query:
return cycle_datas
for data in cycle_datas:
SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber
.time == begin_date, SummaryBomber.cycle == data[0],
SummaryBomber.bomber_id == data[0]).execute()
return cycle_datas
def get_cycle_new_case_cleared(begin_date, end_date):
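"""Entry-day repayment sums for cases entering C1A and C1B; updates
the cycle-level SummaryBomber rows."""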
sql = (
"""
SELECT
'1' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.created_at ) = date( br.repay_at )
WHERE ba.created_at > '%s'
AND ba.created_at < '%s'
GROUP BY 1, 2 ) a
UNION
SELECT
'2' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
GROUP BY 1, 2) a
"""
% (begin_date, end_date, begin_date, end_date))
cycle_cleared = run_all_sql(sql)
for i in cycle_cleared:
SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber
.cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.
time == begin_date).execute()
def get_change_bomber():
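"""Read today's BomberLog role changes for C1B/C2/C3 roles and return,
per cycle and group, the removed bombers (del_ids) and the current
members (new_ids) for redistribution."""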
cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value, 8: Cycle.C3.value}
result = {}
bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,
BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=
BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.
created_at) == date.today(), BomberLog.role_id << list(
cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.
instalment == 0).dicts()
for b_log in bomber_logs:
cycle = cycle_role_map.get(b_log['role_id'])
group_id = b_log['group_id']
if cycle in result:
if group_id not in result[cycle]:
result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],
'new_ids': []}
else:
result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],
'new_ids': []}}
if b_log['operation'] == 0:
result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])
if result:
bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map
.keys())), Bomber.is_del == 0, Bomber.instalment == 0)
for b in bombers:
cycle_result = result.get(cycle_role_map[b.role_id], {})
role_result = cycle_result.get(b.group_id)
if not role_result:
continue
role_result['new_ids'].append(b.id)
result_list = []
for cycle, group_dict in result.items():
result_list.extend(list(group_dict.values()))
return result_list
return []
def get_surplus_application(new_ids, del_ids, average_nums, classified_apps):
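"""Build the reassignment pool: all apps held by deleted bombers plus
each remaining bomber's overflow above its average quota. Returns the
pool and the bombers sorted by need_num, descending."""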
surplus_apps = []
for del_id in del_ids:
del_res = classified_apps.get(del_id, {})
p_list = del_res.get('p_list', [])
np_list = del_res.get('np_list', [])
del_res['need_num'] = -(len(p_list) + len(np_list))
del_res['to_list'] = np_list + p_list
surplus_apps.extend(p_list)
surplus_apps.extend(np_list)
for index, bid in enumerate(new_ids):
average = average_nums[index]
bomber_app = classified_apps.get(bid)
if not bomber_app:
bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid
).first()
bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':
[], 'np_num': 0, 'need_num': average, 'partner_id': bomber.
partner_id if bomber else ''}
classified_apps[bid] = bomber_app
else:
p_num = len(bomber_app['p_list'])
np_num = len(bomber_app['np_list'])
if p_num > average:
bomber_app['need_num'] = -np_num
else:
bomber_app['need_num'] = average - (p_num + np_num)
bomber_app['p_num'] = p_num
bomber_app['np_num'] = np_num
if bomber_app['need_num'] < 0:
random.shuffle(bomber_app['np_list'])
res_over = bomber_app['np_list'][:-bomber_app['need_num']]
bomber_app['to_list'] = res_over
surplus_apps.extend(res_over)
classified_apps_list = sorted(classified_apps.values(), key=lambda x: x
['need_num'], reverse=True)
return surplus_apps, classified_apps_list
@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)
def bomber_dispatch_applications(payload, msg_id):
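"""Redistribute cases after role changes, run separately for
cash-loan and instalment bombers."""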
change_bombers = get_change_bomber()
instalment_change_bombers = get_instalment_change_bomber()
params = {ApplicationType.CASH_LOAN.value: change_bombers,
ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}
for type, bombers in params.items():
change_bomber_dispatch_apps(change_bombers=bombers, type=type)
def get_summary_daily_time():
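"""Pick the intraday window that just ended (yesterday 17:20-24:00,
00:00-12:40, or 12:40-17:20) and the date the summary belongs to."""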
mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')
mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')
now_date = datetime.now()
now_date_time = now_date.time()
today_str = str(now_date.date())
if now_date_time < mid_time_t1.time():
yes_date = now_date - timedelta(days=1)
yes_date_str = str(yes_date.date())
begin_str = yes_date_str + ' 17:20:00'
end_str = today_str + ' 00:00:00'
elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():
begin_str = today_str + ' 00:00:00'
end_str = today_str + ' 12:40:00'
else:
begin_str = today_str + ' 12:40:00'
end_str = today_str + ' 17:20:00'
begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')
end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')
summary_datetime = now_date - timedelta(minutes=30)
summary_date = summary_datetime.date()
return begin_time, end_time, summary_date
@action(MessageAction.SUMMARY_DAILY)
def summary_daily_data(payload, msg_id):
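"""Build this slot's per-bomber call counts, PTP counts, and repayment
amounts (C1 by current_bomber_id, later cycles via bombing_history PTP
attribution) and bulk-insert SummaryDaily rows."""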
begin_time, end_time, summary_date = get_summary_daily_time()
call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.
bomber_id, CallActionsR.application_id, CallActionsR.promised_date,
CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(
CallActionsR.created_at >= begin_time, CallActionsR.created_at <
end_time, CallActionsR.type << (0, 1))
summary_dailys = {}
for call in call_actions:
if call.bomber_id not in summary_dailys:
summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.
bomber_id, 'summary_date': str(summary_date)}
if call.name and call.number:
summary_dailys[call.bomber_id]['call_cnt'] += 1
if call.promised_date:
summary_dailys[call.bomber_id]['ptp_cnt'] += 1
C1_sql = (
"""
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,a.cycle
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,2,4,5)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
"""
% (begin_time, end_time))
C1_repayment = run_all_sql(C1_sql)
other_sql = (
"""
select current_bomber_id,sum(pay_amount) as pay_amount,cycle
from (
select application_id,current_bomber_id,pay_amount,repay_at,cycle
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at,
br.cycle
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (3,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
"""
% (begin_time, end_time))
other_repayment = run_all_sql(other_sql)
all_repayment = C1_repayment + other_repayment
for res in all_repayment:
bomber_id, pay_amount, cycle = res
if bomber_id in summary_dailys:
summary_dailys[bomber_id]['repayment'] += pay_amount
else:
summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': cycle, 'repayment': pay_amount, 'bomber_id':
bomber_id, 'summary_date': str(summary_date)}
insert_values = list(summary_dailys.values())
if insert_values:
SummaryDaily.insert_many(insert_values).execute()
def month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):
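"""Monthly dispatch, partner leg: shuffle the non-PTP pool and hand
each NORMAL partner a slice proportional to its app_percentage.
Returns the apps left over for in-house dispatch."""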
apps = list(apps)
np_apps = list(np_apps)
random.shuffle(np_apps)
apps_len = len(apps)
np_apps_len = len(np_apps)
end = 0
all_app_percentage = 0
partners = Partner.select().where(Partner.cycle == cycle, Partner.
status == PartnerStatus.NORMAL.value)
for p in partners:
all_app_percentage += p.app_percentage
for partner in partners:
bombers = Bomber.select().where(Bomber.partner == partner.id,
Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.
value)
bids = {b.id: b for b in bombers}
if len(bids) == 0:
logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))
continue
start = end
if np_apps_len >= int(apps_len * all_app_percentage):
end = start + int(apps_len * partner.app_percentage)
else:
end = start + int(np_apps_len * partner.app_percentage /
all_app_percentage)
partner_app = np_apps[start:end]
dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)
np_apps = np_apps[end:]
return np_apps
def month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):
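"""Monthly dispatch, in-house leg: assign the remaining apps to the
cycle's AB-test bombers; C1A non-PTP apps instead go back to the
unassigned PROCESSING pool with cycle-level in/out records."""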
sys_cycle = {1: 'AB_TEST_C1A', 2: 'AB_TEST_C1B', 3: 'AB_TEST_C2', 4: 'AB_TEST_C3'}
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0
)
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
bombers = bombers.where(Bomber.instalment == 0)
bids = {b.id: b for b in bombers}
if cycle == Cycle.C1A.value:
np_ids = [a['id'] for a in np_apps]
np = Application.update(status=ApplicationStatus.PROCESSING.value,
ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids
).execute()
bomber_app_logs = app_logs.get(cycle, {})
out_param = {'application_ids': bomber_app_logs.get('to_ids', []),
'month_dispatch': 1, 'src_bomber_id': cycle}
new_out_record(**out_param)
in_param = {'cycle': cycle, 'application_ids': np_ids,
'dest_bomber_id': cycle}
new_in_record(**in_param)
bomber_app_logs['need_num'] = len(np_apps)
bomber_app_logs['form_ids'] = np_ids
bomber_app_logs['status'] = 1
else:
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)
dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)
def dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,
type=ApplicationType.CASH_LOAN.value):
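"""Shuffle apps and split them evenly across bids. PTP apps keep the
receiving bomber as ptp_bomber, in/out records are written (or the
instalment variant), and partner dispatches also rewrite DispatchApp."""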
apps = list(apps)
random.shuffle(apps)
bids_list = list(bids.keys())
if len(bids_list) <= 0:
logging.info('get_dispatch_app_to_bomber no bids')
return
average_num = get_average_number(len(apps), len(bids_list))
bomber_end = 0
with db.atomic():
for index, bid in enumerate(bids_list):
current_bomber = bids.get(bid)
bomber_app_logs = app_logs.get(bid, {})
bomber_start = bomber_end
bomber_end = bomber_start + average_num[index]
bomber_apps = apps[bomber_start:bomber_end]
from_p, from_np, from_ids, status = [], [], [], 0
for ba in bomber_apps:
promised_date = ba.get('promised_date')
from_ids.append(ba['id'])
if promised_date and promised_date.date() >= date.today():
from_p.append(ba['id'])
else:
from_np.append(ba['id'])
app_status = ApplicationStatus.AB_TEST.value
if (cycle == Cycle.C1A.value and not out_partner and type ==
ApplicationType.CASH_LOAN.value):
app_status = ApplicationStatus.PROCESSING.value
if from_p:
p = Application.update(ptp_bomber=bid, latest_bomber=bid,
status=app_status).where(Application.id << from_p).execute(
)
p_ids = bomber_app_logs.get('p_ids', []) + from_p
bomber_app_logs['p_ids'] = p_ids
if from_np:
np = Application.update(latest_bomber=bid, ptp_bomber=None,
status=ApplicationStatus.AB_TEST.value).where(
Application.id << from_np).execute()
np_ids = bomber_app_logs.get('np_ids', []) + from_np
bomber_app_logs['np_ids'] = np_ids
in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.
partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}
if type == ApplicationType.CASH_LOAN.value:
out_param = {'src_bomber_id': bid, 'application_ids':
bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}
new_out_record(**out_param)
new_in_record(**in_param)
else:
out_and_in_record_instalment(**in_param)
bomber_app_logs['status'] = 1
need_num = bomber_app_logs.get('need_num', 0) + average_num[index]
bomber_app_logs['need_num'] = need_num
all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids
bomber_app_logs['form_ids'] = all_form_ids
if not out_partner:
continue
try:
DispatchApp.delete().where(DispatchApp.application.in_(
from_ids)).execute()
dispatch_ins = [{'application': id, 'partner':
current_bomber.partner_id, 'bomber': bid, 'status':
DisAppStatus.NORMAL.value} for id in from_ids]
DispatchApp.insert_many(dispatch_ins).execute()
except Exception as e:
logging.info(
'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str
(e), bid, from_ids))
def calc_entry_time(overdue_days):
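"""Map overdue_days onto the entry-time columns: the bucket containing
overdue_days (dpd1 1-3, C1A 4-10, C1B 11-30, C2 31-60, C3 61-90) gets
now(), every other bucket gets None; e.g. 5 stamps only C1A_entry."""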
app_entry_time = {}
overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],
'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}
for key, value in overdue_entry.items():
if value[0] <= overdue_days <= value[1]:
app_entry_time[key] = datetime.now()
else:
app_entry_time[key] = None
return app_entry_time
@action(MessageAction.SUMMARY_BOMBER_OVERDUE)
def summary_bomber_overdue_everyday(payload, msg_id):
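"""Daily snapshot of all active overdue bills per cycle into
BomberOverdue (inserted in 1000-row batches); PTP and follow-up fields
are blanked once their dates have passed."""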
cycle_list = Cycle.values()
which_day = date.today()
for cycle in cycle_list:
apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,
ApplicationR.ptp_bomber, ApplicationR.overdue_days,
ApplicationR.promised_date, ApplicationR.follow_up_date,
ApplicationR.external_id, OverdueBillR.status, OverdueBillR.
periods, OverdueBillR.sub_bill_id).join(OverdueBillR,
JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id
).where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()
bomber_overdue_list = []
for app in apps:
status = app.get('status')
if status == ApplicationStatus.REPAID.value:
continue
ptp_bomber = app.get('ptp_bomber')
promised_date = app.get('promised_date')
follow_up_date = app.get('follow_up_date')
if not promised_date or promised_date.date() < date.today():
ptp_bomber = promised_date = None
if not follow_up_date or follow_up_date.date() < date.today():
follow_up_date = None
overdue_dict = {'collection_id': app.get('id'), 'external_id':
app.get('external_id'), 'sub_bill_id': app.get(
'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app
.get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':
ptp_bomber, 'promised_date': promised_date,
'follow_up_date': follow_up_date, 'which_day': which_day,
'overdue_days': app.get('overdue_days')}
bomber_overdue_list.append(overdue_dict)
try:
if bomber_overdue_list:
with db.atomic():
for index in range(0, len(bomber_overdue_list), 1000):
insert_list = bomber_overdue_list[index:index + 1000]
BomberOverdue.insert_many(insert_list).execute()
except Exception as e:
logging.info(
'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %
(cycle, str(which_day), str(e)))
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)
def today_ptp_auto_call_switch(payload, msg_id):
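"""Switch auto-call off for the day for C1 bombers holding a PTP
promise that falls due today."""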
today = datetime.today().date()
next_day = today + timedelta(days=1)
apps = ApplicationR.select(ApplicationR.latest_bomber).where(
ApplicationR.promised_date < next_day, ApplicationR.promised_date >=
today, ApplicationR.promised_date.is_null(False), ApplicationR.
status != ApplicationStatus.REPAID.value, ApplicationR.cycle <
Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(
ApplicationR.latest_bomber)
bids = [a.latest_bomber_id for a in apps]
if not bids:
return
q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(
BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids
).execute()
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)
def update_today_switch_every_day(payload, msg_id):
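"""Daily reset: switch auto-call back on for every bomber with an
auto extension."""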
q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(
BomberPtp.auto_ext.is_null(False)).execute()
@action(MessageAction.BOMBER_HEALTH_CHECK)
def health_check(payload, msg_id):
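"""No-op handler registered for the BOMBER_HEALTH_CHECK probe."""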
pass
def dpd1_process(lst):
"""已废弃的方法"""
if not lst:
return
for key, l in lst.items():
rule = getattr(BeforeInBomber, key).value
query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)
).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(
rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.
call_success()))
success_set = {i.loanid for i in query}
failed_list = list(set(l) - success_set)
post_params = {'$and': rule.get('$and'), 'app_list': failed_list}
resp = Hyperloop().post('/bomber/score/verify', json=post_params)
if not resp.ok:
logging.error('hyperloop score verification failed: %s, %s',
str(resp.status_code), str(resp.text))
logging.error('hyperloop score verification failed: %s', str(
post_params))
continue
logging.debug('hyperloop score verification success: %s', resp.content)
resp_json = resp.json()
app_list = resp_json['data']
if not app_list:
continue
for item in app_list:
if random.randint(0, 5) == 1:
send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':
int(item)})
@action(MessageAction.GET_IVR)
def get_ivr(payload, msg_id):
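"""Page through BillService's IVR list, map each overdue app
(app name, su, days to due) onto an IVR voice group, and rebuild
AutoIVR; paging progress persists in IVRActionLog so reruns resume."""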
logging.warning('start get_ivr')
sys_config = SystemConfig.select().where(SystemConfig.key ==
'DPD1-3_INTO_IVR').first()
now = date.today()
if sys_config and sys_config.value:
start = now - timedelta(days=3)
else:
start = now
end = now + timedelta(days=4)
item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()
if not item:
AutoIVR.delete().execute()
current_page = 0
elif item.current_page >= item.total_page:
return
else:
current_page = item.current_page
auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,
'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,
'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':
10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,
'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,
'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,
'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,
'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,
'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,
'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,
'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,
'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,
'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,
'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':
101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,
'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,
'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,
'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}
current_page += 1
with db.atomic() as transaction:
while True:
bill_service = BillService()
ivr_action = bill_service.ivr_pages(page=current_page,
page_size=500, start_time=utc_datetime(str(start)),
end_time=utc_datetime(str(end)))
result = ivr_action['result']
page_size = int(ivr_action.get('page_size', 0))
total_page = int(ivr_action.get('total_page', 0))
insert_args = []
for a in result:
due_at = no_utc_datetime(a['due_at'])
days = (due_at.date() - now).days
if days == 2:
continue
if days > 0:
time = str(days)
else:
time = str(days).replace('-', 'PDP')
key = a['app_name'] + str(a['su']) + time
group = auto_ivr.get(key)
user_id = a['user_id']
try:
user_resp = AccountService().get_user(path_params={
'user_id': user_id})
if str(user_resp['mobile_no']) == str(a['user_mobile_no']):
numbers = a['user_mobile_no']
else:
numbers = a['user_mobile_no'] + ',' + user_resp.get(
'mobile_no')
except Exception:
logging.error('Request Account Service Error.')
numbers = a['user_mobile_no']
insert_args.append({'application_id': a['id'], 'numbers':
numbers, 'group': group, 'user_id': user_id})
AutoIVR.insert_many(insert_args).execute()
if current_page == 1:
IVRActionLog.create(total_page=total_page, proc_date=now,
page_size=page_size, current_page=current_page)
item = IVRActionLog.get(IVRActionLog.proc_date == now)
else:
item.current_page = current_page
item.page_size = page_size
item.total_page = total_page
item.save()
transaction.commit()
current_page += 1
if current_page > int(total_page):
break
if sys_config and sys_config.value:
try:
classfiy_dpd_ptp_apps()
except Exception as e:
logging.error('dpd1-3_test_error:%s' % str(e))
def ivr_t2_test():
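"""Keep only a configured proportion (default 0.2) of IVR entries in
the T2 groups (39-44) available; the rest are marked SUCCESS so they
are skipped."""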
t2_groups = [39, 40, 41, 42, 43, 44]
ivr_test_proportion = 0.2
sys_config = SystemConfig.select().where(SystemConfig.key ==
'IVR_TEST_PROPORTION').first()
if sys_config and sys_config.value:
ivr_test_proportion = float(sys_config.value)
t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.
status == AutoIVRStatus.AVAILABLE.value)
t2_dict = defaultdict(list)
for ivr in t2_ivrs:
t2_dict[ivr.group].append(ivr.id)
test_ivr_ids = []
for group, ivr_ids in t2_dict.items():
number = ceil(len(ivr_ids) * ivr_test_proportion)
test_ivr_ids += ivr_ids[:number]
if not test_ivr_ids:
return
q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.
group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()
@action(MessageAction.APP_MERGE)
@deprecated(version='1.0', reason='This function will be removed soon')
def app_merge(payload, msg_id):
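"""Deprecated one-off backfill: pull DPD1-3 first-loan DanaCepat apps
into the AB test and release claimed apps whose users logged in
again."""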
sql = """
select *
from (
select a.id as id
from dashboard.application as a
inner join repayment.bill2 as b on b.external_id = a.id
where not exists (
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.app = 'DanaCepat'
and a.is_first_loan = 1
and a.apply_at < '2018-08-23 20:50:00'
and b.overdue_days between 1 and 3
and b.status != 2) result
where not exists (
select 1
from bomber.application as a
where a.cycle = 1
and a.status = 4
and a.id = result.id
)
"""
cursor = readonly_db.get_cursor()
cursor.execute(sql)
new_data = cursor.fetchall()
cursor.close()
if new_data:
bomber = [103, 104]
for d in new_data:
app_id = {'id': d[0]}
application_overdue(app_id, None)
Application.update(status=ApplicationStatus.AB_TEST.value,
latest_bomber=random.choice(bomber), ptp_bomber=None).where(
Application.id == d[0]).execute()
logging.warning('add new app success')
ptp = date.today() - timedelta(days=1)
del_sql = (
"""
select a.id
from bomber.application as a
where exists(
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.cycle = 1
and a.status = 4
and (a.promised_date is null or a.promised_date < "%s")
"""
% ptp)
cursor = readonly_db.get_cursor()
cursor.execute(del_sql)
del_date = cursor.fetchall()
cursor.close()
if not del_date:
return
ids = list()
for d in del_date:
ids.append(d[0])
Application.update(status=ApplicationStatus.UNCLAIMED.value,
latest_bomber=None).where(Application.id << ids).execute()
@action(MessageAction.APPLICATION_BOMBER)
def application_overdue(payload, msg_id):
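"""Handle a bill-overdue event: validate the sub-bill, create or
update the local Application and its OverdueBill, open an automatic
escalation, and import the applicant's contacts."""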
application_id = payload['id']
sub_bill_id = payload['bill_sub_id']
local_app = Application.select().where(Application.external_id ==
application_id).order_by(Application.finished_at).first()
if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:
logging.info('application %s overdue, already exists', application_id)
add_contact(local_app)
return
if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:
overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==
sub_bill_id, OverdueBillR.external_id == application_id)
if overdue_bill.exists():
logging.info(
'application %s,sub_bill_id %s overdue, already exists' % (
application_id, sub_bill_id))
return
try:
sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
sub_bill = sub_bill[0]
except Exception:
logging.error(
'application %s overdue, get sub_bill info failed:Request To repayment Error'
, application_id)
return
if sub_bill['status'] == 2:
logging.error('application %s overdue, but bills already cleared',
application_id)
return
overdue_days = sub_bill.get('overdue_days', 0)
if overdue_days == 0:
logging.info('application {} no overdue'.format(str(application_id)))
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
logging.error('get application %s failed: Request to GoldenEye.',
application_id)
return
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
apply_history = Dashboard().get('/users/%s/apply-history' % user_id)
if not apply_history.ok:
logging.error(
'get user %s apply history failed: Request to Dashboard Failed.',
user_id)
return
history = apply_history.json().get('data')
loan_success_times = len([1 for i in history if i['status'] in [80,
90, 100, 70] and i['id'] != gold_app['id']])
id = application_id
type = ApplicationType.CASH_LOAN.value
bill_id = sub_bill.get('bill_id')
amount = sub_bill.get('amount')
amount_net = sub_bill.get('amount_net')
interest_rate = sub_bill.get('interest_rate')
overdue_days = sub_bill.get('overdue_days')
origin_due_at = sub_bill.get('origin_due_at')
sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,
'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),
'overdue_days': overdue_days, 'origin_due_at': origin_due_at,
'amount': amount, 'amount_net': amount_net, 'interest_rate':
interest_rate, 'external_id': application_id}
if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:
if local_app and local_app.status != ApplicationStatus.REPAID.value:
sub_overdue_bill['collection_id'] = local_app.id
local_app.amount += amount
local_app.amount_net += amount_net
local_app.save()
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('application %s,sub_bill_id:%s overdue created' %
(application_id, sub_bill_id))
return
else:
id = idg()
type = ApplicationType.CASH_LOAN_STAGING.value
sub_overdue_bill['collection_id'] = id
ptp_info = BombingHistory.filter(BombingHistory.application == id).first()
promised_amount = ptp_info and ptp_info.promised_amount
promised_date = ptp_info and ptp_info.promised_date
application = Application.create(id=id, user_id=gold_app['user_id'],
user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[
'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],
contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(
'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=
birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(
gold_app.get('id_ektp')), profile_province=(gold_app.get(
'profile_province') or {}).get('name'), profile_city=(gold_app.get(
'profile_city') or {}).get('name'), profile_district=(gold_app.get(
'profile_district') or {}).get('name'), profile_residence_time=
gold_app.get('profile_residence_time'), profile_residence_type=
gold_app.get('profile_residence_type'), profile_address=gold_app.
get('profile_address'), profile_education=gold_app.get(
'profile_education'), profile_college=(gold_app.get(
'profile_college') or {}).get('name'), job_name=gold_app.get(
'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get
('job_bpjs'), job_user_email=gold_app.get('job_user_email'),
job_type=gold_app.get('job_type'), job_industry=gold_app.get(
'job_industry'), job_department=gold_app.get('job_department'),
job_province=(gold_app.get('job_province') or {}).get('name'),
job_city=(gold_app.get('job_city') or {}).get('name'), job_district
=(gold_app.get('job_district') or {}).get('name'), job_address=
gold_app.get('job_address'), amount=amount, amount_net=amount_net,
interest_rate=interest_rate, term=gold_app.get('term'),
origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=
sub_bill.get('repay_at'), loan_success_times=loan_success_times,
arrived_at=datetime.now(), follow_up_date=datetime.now(),
promised_amount=promised_amount, promised_date=promised_date,
external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=
datetime.now())
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('overdue application %s created', application_id)
Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)
add_contact(application)
def add_contact(application):
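"""Collect every phone number known for the applicant (apply info,
extra phones, EC contacts, job phone, SMS contacts, call-frequency
top 5, KTP, dual contacts, my_number) into Contact, de-duplicating
against numbers already stored."""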
logging.info('start add contact for application: %s', application.id)
contacts = Contact.filter(Contact.user_id == application.user_id)
existing_numbers = {contact.number for contact in contacts}
insert_contacts = list()
mon_insert_contact = {}
user_mobile_no = number_strip(application.user_mobile_no)
if user_mobile_no and user_mobile_no not in existing_numbers:
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': user_mobile_no, 'relationship':
Relationship.APPLICANT.value, 'source': 'apply info',
'real_relationship': Relationship.APPLICANT.value})
existing_numbers.add(number_strip(application.user_mobile_no))
extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id
)
if not extra_phone.ok:
extra_phone = []
logging.error('get user %s extra contacts failed', application.user_id)
else:
extra_phone = extra_phone.json()['data']
if extra_phone:
for i in extra_phone:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': number, 'relationship':
Relationship.APPLICANT.value, 'source': 'extra phone',
'real_relationship': Relationship.APPLICANT.value})
key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
ec_contact = []
contact = json.loads(application.contact or '[]')
for i in contact:
if number_strip(i['mobile_no']
) not in existing_numbers and number_strip(i['mobile_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['mobile_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['mobile_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['mobile_no']))
if i['type'] != 1:
continue
if number_strip(i['tel_no']) not in existing_numbers and number_strip(i
['tel_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['tel_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['tel_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['tel_no']))
if ec_contact:
Contact.insert_many(ec_contact).execute()
if all((application.job_tel, number_strip(application.job_tel),
number_strip(application.job_tel) not in existing_numbers)):
insert_contacts.append({'user_id': application.user_id, 'name':
None, 'number': number_strip(application.job_tel),
'relationship': Relationship.COMPANY.value, 'source':
'basic info job_tel', 'real_relationship': Relationship.COMPANY
.value})
key = user_mobile_no, number_strip(application.job_tel
), ContactType.C_BASIC_INFO_JOB_TEL.value
mon_insert_contact[key] = 1, 0, None
existing_numbers.add(number_strip(application.job_tel))
sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %
application.external_id)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id
)
else:
sms_contacts = sms_contacts.json()['data']
if sms_contacts:
for i in sms_contacts:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.SUGGESTED.value, 'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value})
key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value
mon_insert_contact[key] = 1, 0, i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
cf = GoldenEye().get('/applications/%s/call/frequency' % application.
external_id)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
insert_contacts = []
fm = GoldenEye().get('/applications/%s/contact/family-member' %
application.external_id)
if not fm.ok:
family = []
logging.error('get application %s family-member info error',
application.external_id)
else:
family = fm.json()['data']
if family:
for i in family:
if not i.get('number'):
logging.info('family member %s' % str(i))
continue
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
logging.info('family members: %s' % str(i))
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.FAMILY.value, 'source': FamilyContactType.
CALLEC.value, 'total_count': i.get('total_count', 1),
'total_duration': i.get('total_duration', 0),
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number, ContactType.F_CALL_EC.value
mon_insert_contact[key] = i.get('total_count', 1), i.get(
'total_duration', 0), i['name'][:128]
existing_numbers.add(number)
mon_update_contact = {}
if call_frequency:
with db.atomic():
count = 1
for i in call_frequency:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
Contact.update(total_count=i['total_count'],
total_duration=i['total_duration']).where(Contact.
number == number, Contact.user_id == application.
user_id).execute()
key = user_mobile_no, number
mon_update_contact[key] = i['total_count'], i[
'total_duration']
continue
if count < 6:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.FAMILY.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': FamilyContactType.
CALLTOP5.value, 'real_relationship': Relationship.
FAMILY.value})
count += 1
key = user_mobile_no, number, ContactType.F_CALL_TOP5.value
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
else:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value})
key = (user_mobile_no, number, ContactType.
S_CALL_FREQUENCY.value)
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
next_apply_list = AccountService().add_contact(application.user_id)
for next_apply in next_apply_list:
number = number_strip(str(next_apply))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
SUGGESTED.value, source='online profile phone',
real_relationship=Relationship.SUGGESTED.value)
key = (user_mobile_no, number, ContactType.
S_ONLINE_PROFILE_PHONE.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %
application.user_id)
if not next_applicant.ok:
next_applicant = []
logging.error('get user %s dual_contact contacts failed' %
application.user_id)
else:
next_applicant = next_applicant.json()['data']
if next_applicant:
for i in next_applicant:
number = number_strip(str(i))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=
application.user_name, number=number, relationship=
Relationship.APPLICANT.value, source='apply info',
real_relationship=Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_APPLY_INFO.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
numbers = []
try:
numbers = AccountService().ktp_number(path_params={'user_id':
application.user_id})
except Exception as e:
logging.info('request ktp numbers failed %s' % str(e))
for n in numbers:
number = number_strip(str(n))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
APPLICANT.value, source='ktp number', real_relationship=
Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
    logging.info('get user %s ktp number contacts success' %
        application.user_id)
    ecs = None
    try:
        ecs = GoldenEye().get('/applications/%s/contact/ec' %
            application.external_id)
    except Exception as e:
        logging.info('request ec-member error: %s' % str(e))
try:
        if not ecs or not ecs.ok:
ec = []
logging.info('get application %s ec-member info error',
application.external_id)
else:
ec = ecs.json()['data']
if ec:
for e in ec:
number = number_strip(e['numbers'])[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=e[
'name'][:128], number=number, relationship=
Relationship.FAMILY.value, source=FamilyContactType
.CONTACTEC.value, real_relationship=Relationship.
FAMILY.value)
key = (user_mobile_no, number, ContactType.F_CONTACT_EC
.value)
mon_insert_contact[key] = 1, 0, e['name'][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add ec_member error:%s' % str(e))
    mn = None
    try:
        mn = GoldenEye().get('/applications/%s/contact/my_number' %
            application.external_id)
    except Exception as e:
        logging.info('request my_number error: %s' % str(e))
try:
        if not mn or not mn.ok:
my = []
logging.info('get application %s my_number info error',
application.external_id)
else:
my = mn.json()['data']
if my:
for m in my:
number = number_strip(m)[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=my[m][
:128], number=number, relationship=Relationship.
SUGGESTED.value, source='my number',
real_relationship=Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_MY_NUMBER.value
mon_insert_contact[key] = 1, 0, my[m][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add my_member error:%s' % str(e))
    cn = None
    try:
        cn = GoldenEye().get('/applications/%s/contact/company-number' %
            application.external_id)
    except Exception as e:
        logging.info('request company-number error: %s' % str(e))
try:
        if not cn or not cn.ok:
cn = []
logging.info('get application %s company_number info error',
application.external_id)
else:
cn = cn.json()['data']
if cn:
for c in cn:
number = c
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=cn[c][
:128], number=number, relationship=Relationship.
COMPANY.value, source='company', real_relationship=
Relationship.COMPANY.value)
key = user_mobile_no, number, ContactType.C_COMPANY.value
mon_insert_contact[key] = 1, 0, cn[c][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add company_member error:%s' % str(e))
try:
ol = AccountService().other_login_contact(userId=application.user_id)
except Exception as e:
logging.error('request other_login error: %s' % e)
ol = {}
try:
for o in ol:
number = number_strip(o)
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=ol[o][:128
], number=number, relationship=Relationship.SUGGESTED.
value, source='other_login', real_relationship=
Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value
mon_insert_contact[key] = 1, 0, ol[o][:128]
except Exception as e:
logging.error('add other_login number error:%s' % e)
logging.info('add contact for application %s finished', application.id)
if mon_insert_contact or mon_update_contact:
send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {
'user_mobile_no': user_mobile_no, 'insert_contact': str(
mon_insert_contact), 'update_contact': str(mon_update_contact),
'user_id': application.user_id, 'name': application.user_name})
def get_contact_from_mongo(number):
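    """Pull contacts for ``number`` from mongo (TotalContact), dropping
    entries whose source has no usable relationship, and return them as
    plain dicts."""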
if not number:
return []
query = TotalContact.objects(src_number=number, source__in=TotalContact
.available()).order_by('source')
lst = []
for c in query:
relation = TotalContact.relationship(c.source)
if relation == -1:
continue
source = TotalContact.str_source(c.source)
if not source:
continue
lst.append({'related_number': c.dest_number, 'source': source,
'is_calc': c.is_calc, 'total_count': c.total_count,
'total_duration': c.total_duration, 'relation': relation,
'name': c.dest_name})
return lst
def check_key_not_none(payload, keys):
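    """Return True when every key in ``keys`` maps to a non-None value in
    ``payload``; log and return False at the first missing one."""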
for key in keys:
if payload.get(key) is None:
logging.error('Missing args {}'.format(str(key)))
return False
return True
@action(MessageAction.BILL_PAID)
def bill_paid(payload, msg_id):
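    """Handle a BILL_PAID message: log the repayment against the matching
    application, promote the last connected number's call priority, and
    drop an Inbox notice for the responsible bomber."""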
validate = check_key_not_none(payload, ['external_id', 'late_fee_part',
'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])
if not validate:
logging.error('payload key not fully pass in.')
return
external_id = payload['external_id']
late_fee_part = Decimal(payload['late_fee_part'])
principal_part = Decimal(payload['principal_part'])
paid_at = payload['paid_at']
partner_bill_id = payload['partner_bill_id']
logging.debug(
'application %s paid principal part %s, paid late fee part %s',
external_id, principal_part, late_fee_part)
application = Application.filter(Application.external_id == external_id
).order_by(-Application.created_at).first()
if not application:
logging.info('application %s paid, not found application', external_id)
return
sub_bill_id = payload['bill_sub_id']
overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==
application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()
if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not
overdue_bill):
logging.info('bill sub not in bomber %s', sub_bill_id)
return
with db.atomic():
repay_at = str_no_utc_datetime(payload['latest_repay_at'])
Application.update(repay_at=repay_at).where(Application.id ==
application.id).execute()
RepaymentLog.create(application=application.id, is_bombed=True,
current_bomber=application.latest_bomber_id, cycle=application.
cycle, principal_part=principal_part, late_fee_part=
late_fee_part, repay_at=paid_at, ptp_bomber=application.
ptp_bomber, latest_call=application.latest_call, periods=
overdue_bill.periods if overdue_bill else None, overdue_bill_id
=overdue_bill.id if overdue_bill else None, partner_bill_id=
partner_bill_id)
phone_status = PhoneStatus.CONNECTED.value
real_relationship = RealRelationship.user_values()
commit = CallActionCommit.NO.value
number = CallActions.select(CallActions.number).where(CallActions.
phone_status == phone_status, CallActions.real_relationship <<
real_relationship, CallActions.commit == commit, CallActions.
application == application.id).order_by(-CallActions.created_at
).first()
if number:
Contact.update(call_priority=PriorityStatus.REPAY.value).where(
Contact.user_id == application.user_id, Contact.
call_priority == PriorityStatus.LAST.value).execute()
Contact.update(call_priority=PriorityStatus.LAST.value).where(
Contact.user_id == application.user_id, Contact.number ==
number.number).execute()
if not application.latest_bomber_id:
return
Inbox.create(title='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id), content=
'application %s,sub_bill_id %s repaid' % (application.
external_id, sub_bill_id), receiver=application.
latest_bomber_id or application.last_bomber_id, category=
InboxCategory.REPAID.value)
@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
"""已废弃"""
bill = payload['head_bill']
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_row = Application.update(repay_at=repay_at).where(Application.
id == bill['external_id']).execute()
logging.info('application %s bill relief done', bill['external_id'])
return updated_row
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query = Application.update(overdue_days=overdue_days).where(Application
.status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.
overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)
updated_rows_count = query.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count)
try:
calc_overdue_days_over_instalment()
except Exception as e:
logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))
apps = Application.filter(Application.status << [ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value], Application.overdue_days > 95,
Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
def calc_overdue_days_over_instalment():
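    """Roll overdue_days forward for instalment sub-bills already past 95
    days and propagate the max value per collection to the staging
    application."""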
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days > 95)
updated_rows_count = query.execute()
logging.info(
'calc_overdue_days_over_instalment done,count:%s,status:%s' % (
updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.collection_id,
OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on
=OverdueBill.collection_id == Application.id).where(Application
.status == status, Application.type == ApplicationType.
CASH_LOAN_STAGING.value)
app_update = {}
for ob in overdue_bills:
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
def calc_overdue_days_instalment():
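    """Roll overdue_days forward for instalment sub-bills within 95 days
    and propagate the max value per collection to the staging application;
    sub-bills repaid before this month are skipped."""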
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
today_now_time = datetime.now()
month_first_day = today_now_time.replace(day=1, hour=1, minute=30,
second=0, microsecond=0)
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days <= 95)
updated_rows_count = query.execute()
logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %
(updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.
created_at, OverdueBill.collection_id, OverdueBill.overdue_days
).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.
collection_id == Application.id).where(Application.status ==
status, Application.type == ApplicationType.CASH_LOAN_STAGING.value
)
app_update = {}
for ob in overdue_bills:
if (ob.status == ApplicationStatus.REPAID.value and ob.
created_at < month_first_day):
continue
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)
def automatic_escalation(payload, msg_id):
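    """Escalate unrepaid applications whose overdue days now fall into a
    later cycle: close the old dispatch record, create an Escalation, and
    reset the application's collection state."""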
app_ids = payload.get('application_list', [])
if not app_ids:
return
apps = Application.select().where(Application.id.in_(app_ids),
Application.status != ApplicationStatus.REPAID.value)
for a in apps:
new_cycle = application_entry_different_calculations(a)
if a.overdue_days < 90:
logging.info(
'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'
.format(a.id, new_cycle, a.cycle, a.overdue_days))
if new_cycle > a.cycle:
with db.atomic():
if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle
.C1B.value):
bomber_id = (a.latest_bomber_id if a.latest_bomber_id else
a.cycle)
DispatchAppHistory.update(out_at=datetime.now(),
out_overdue_days=a.overdue_days).where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == bomber_id).execute()
Escalation.create(application=a.id, type=EscalationType.
AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,
current_cycle=a.cycle, escalate_to=new_cycle,
current_bomber_id=a.latest_bomber)
dis_app_update = DispatchApp.update(status=DisAppStatus.
ABNORMAL.value).where(DispatchApp.application == a.id)
dis_app_update.execute()
a.cycle = new_cycle
a.last_bomber = a.latest_bomber
a.status = ApplicationStatus.UNCLAIMED.value
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
if new_cycle == Cycle.C1B.value:
a.C1B_entry = datetime.now()
elif new_cycle == Cycle.C2.value:
a.C2_entry = datetime.now()
elif new_cycle == Cycle.C3.value:
a.C3_entry = datetime.now()
a.save()
logging.info('automatic escalation done')
def application_entry_different_calculations(app):
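    """Map overdue_days to the cycle whose day range contains it; keep the
    current cycle when nothing matches."""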
    conf = {1: [1, 10], 2: [11, 30], 3: [31, 60], 4: [61, 90],
        5: [91, 999999]}
for new_cycle, scopes in conf.items():
if scopes[0] <= app.overdue_days <= scopes[1]:
return new_cycle
return app.cycle
@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
"""已废弃"""
employees = Bomber.select(Bomber, Role).join(Role)
summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,
'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,
'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':
0, 'sms_sent': 0} for i in employees}
now_date = date.today()
cal_date = now_date - timedelta(days=1)
claimed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('claimed')).where(fn.DATE(Application.
claimed_at) == cal_date, Application.status << [ApplicationStatus.
PROCESSING.value, ApplicationStatus.REPAID.value], Application.
latest_bomber.is_null(False)).group_by(Application.latest_bomber)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False)).group_by(
Application.latest_bomber)
completed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('completed')).where(Application.
latest_bombing_time.is_null(False), fn.DATE(Application.
latest_bombing_time) == cal_date, Application.latest_bomber.is_null
(False)).group_by(Application.latest_bomber)
escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(
Escalation.id).alias('escalated')).where(fn.DATE(Escalation.
created_at) == cal_date, Escalation.type == EscalationType.
AUTOMATIC.value, Escalation.current_bomber.is_null(False),
Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation
.current_bomber)
transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).
alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==
cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(
Transfer.operator)
promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(
BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.
created_at) == cal_date, BombingHistory.result == BombingResult.
HAS_PROGRESS.value).group_by(BombingHistory.bomber)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,
RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.
current_bomber)
calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id
).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==
cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)
calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.
record_id).alias('calls_connected')).where(fn.DATE(CallLog.
time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==
'1').group_by(CallLog.user_id)
sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(
ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_
(ConnectType.sms()), ConnectHistory.created_at >= cal_date,
ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)
for i in claimed:
summary[i.latest_bomber_id]['claimed'] += i.claimed
for i in completed:
summary[i.latest_bomber_id]['completed'] += i.completed
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
for i in escalated:
summary[i.current_bomber_id]['escalated'] += i.escalated
for i in transferred:
summary[i.operator_id]['transferred'] += i.transferred
for i in promised:
summary[i.bomber_id]['promised'] += i.promised
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in calls_made:
summary[int(i.user_id)]['calls_made'] += i.calls_made
for i in calls_connected:
summary[int(i.user_id)]['calls_connected'] += i.calls_connected
for i in sms_sent:
summary[i.operator_id]['sms_sent'] += i.sms_sent
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'claimed': data['claimed'], 'completed': data['completed'],
'cleared': data['cleared'], 'escalated': data['escalated'],
'transferred': data['transferred'], 'promised': data['promised'
], 'amount_recovered': data['amount_recovered'], 'calls_made':
data['calls_made'], 'calls_connected': data['calls_connected'],
'sms_sent': data['sms_sent'], 'date': cal_date})
if insert_args:
Summary.insert_many(insert_args).execute()
cycle_args = []
escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(
Escalation.id).alias('escalated_in')).where(Escalation.status ==
ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==
cal_date).group_by(Escalation.escalate_to)
for i in escalated_in:
cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.
escalated_in, 'date': cal_date})
amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM
(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)
for i in amount_recovered_total:
amount_recovered_total = i.principal_part + i.late_fee_part
cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':
amount_recovered_total, 'date': cal_date})
if cycle_args:
Summary.insert_many(cycle_args).execute()
logging.info('cal summary done')
send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})
@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
"""已废弃,定时任务还在执行,具体情况待确定"""
cal_date = date.today() - timedelta(days=1)
employees = Bomber.select(Bomber, Role).join(Role)
auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,
AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')
).where(fn.DATE(AutoCallActions.created_at) == cal_date)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.
is_null(False), RepaymentLog.is_bombed == True)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False))
auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,
AutoCallActions.result)
amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)
cleared = cleared.group_by(Application.latest_bomber)
summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,
'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered':
0} for e in employees}
for a in auto_call_actions:
summary[a.bomber_id]['answered_calls'] += a.count
if a.result == AutoCallResult.PTP.value:
summary[a.bomber_id]['ptp'] += a.count
if a.result == AutoCallResult.FOLLOW_UP.value:
summary[a.bomber_id]['follow_up'] += a.count
if a.result == AutoCallResult.NOT_USEFUL.value:
summary[a.bomber_id]['not_useful'] += a.count
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'answered_calls': data['answered_calls'], 'ptp': data['ptp'],
'follow_up': data['follow_up'], 'not_useful': data['not_useful'
], 'cleared': data['cleared'], 'amount_recovered': str(data[
'amount_recovered']), 'date': cal_date})
if insert_args:
Summary2.insert_many(insert_args).execute()
@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
app_id = payload['id']
msg_type = payload['msg_type']
discount_to = payload['discount_to']
effective_to = payload['effective_to']
application = Application.filter(Application.id == app_id).first()
if not application:
logging.error(
'discount approved msg send failed application %s not found',
app_id)
return
template = Template.select(Template.text, Template.app).where(Template.
type == ConnectType.AUTO_SMS.value, Template.id << Template.
get_auto_sms_tpl(msg_type), Template.app == application.app).first()
if not template:
        logging.error(
            'discount approved msg send failed template %s not found',
            msg_type)
return
promised_date = None
if application.promised_date:
promised_date = application.promised_date.strftime('%d-%m-%Y')
tpl_data = {'user_name': application.user_name, 'due_days': application
.overdue_days, 'app_name': application.app, 'phone': application.
user_mobile_no, 'cs_number': cs_number_conf.get(application.app,
'02150202889'), 'promised_date': promised_date, 'discount_to':
discount_to, 'effective_to': effective_to}
content = template.text.format(**tpl_data)
data_list = [{'receiver': '62' + application.user_mobile_no, 'content':
content, 'title': ''}]
send_sms(data_list, msg_type, application.app)
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):
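    """Rebuild AutoCallList from eligible C1A applications (excluding ones
    already dispatched) and fan out BOMBER_AUTO_CALL_CONTACT messages in
    batches of 100."""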
with db.atomic():
bomber_dispatch_app()
dispatch_instalment_app()
dis_apps = DispatchApp.select(DispatchApp.application).where(
DispatchApp.status == DisAppStatus.NORMAL.value)
c1_apps = Application.select(Application.id, Application.cycle,
Application.follow_up_date, Application.called_times).where(Application
.status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.
AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.
is_rejected == False, Application.promised_date.is_null(True) | (fn
.DATE(Application.promised_date) < datetime.today().date())).order_by(
Application.overdue_days, Application.apply_at)
dis_apps_ids = [da.application_id for da in dis_apps]
insert_args = []
for a in c1_apps:
if a.id in dis_apps_ids:
continue
insert_args.append({'application': a.id, 'cycle': a.cycle,
'follow_up_date': a.follow_up_date, 'called_times': 1 if a.
called_times else 0, 'description': 'init'})
if not insert_args:
        logging.error('no applications need auto call')
with db.atomic():
AutoCallList.delete().execute()
for idx in range(0, len(insert_args), 100):
AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()
for idx in range(0, len(insert_args), 100):
application_list = [i['application'] for i in insert_args[idx:idx +
100]]
send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {
'application_list': application_list})
logging.info('bomber generate auto call list finished')
send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})
class ChangeBomberTool(object):
@staticmethod
def in_record(bomber_id, ids, bd):
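        """Point ``ids`` at ``bomber_id`` and bulk-insert the matching
        DispatchAppHistory entry rows (pending amounts come from ``bd``)."""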
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.
alias('application_id'), R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(
'expected_out_time'), Application.overdue_days.alias(
'entry_overdue_days')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << ids)
Application.update(latest_bomber=bomber_id).where(Application.id.
in_(ids)).execute()
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bd),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@staticmethod
def out_record(a, bd):
_id = str(a.id)
DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a
.overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]
.get('principal_paid')), out_late_fee_pending=bd[_id].get(
'late_fee') - bd[_id].get('late_fee_paid')).where(
DispatchAppHistory.application == a.id, DispatchAppHistory.
bomber_id == a.latest_bomber_id).execute()
a.last_bomber = a.latest_bomber
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
a.save()
@staticmethod
def classify(l, b):
if len(l) == 1:
return l[0]
_l = filter(lambda x: x['bomber'] != b, l)
return min(_l, key=lambda x: len(x['ids']))
def bomber_dispatch_app():
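    """Dispatch today's C2-entry cash-loan applications to partners by
    app_percentage; whatever remains goes to the AB-test C2 bombers read
    from SystemConfig."""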
try:
c1a_dispatch_app()
except Exception as e:
logging.error('c1a_dispatch_app error:%s' % str(e))
    cycle = {1: 10, 2: 30, 3: 60, 4: 90}
apps = Application.select().where(fn.DATE(Application.C2_entry) == date
.today(), Application.type == ApplicationType.CASH_LOAN.value)
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C2.value)
apps_ids = [a.id for a in apps]
dispatch_inserts = []
start_index = 0
apps_length = len(apps_ids)
logging.warning('apps length %s' % str(apps_length))
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)
gen = CycleIter([b.id for b in bombers])
existing_list = []
end_index = start_index + int(apps_length * p.app_percentage)
        logging.info('partner app end index %s' % str(end_index))
if not apps_ids[start_index:end_index]:
continue
bills = BillService().bill_list(application_ids=apps_ids[
start_index:end_index])
bill_dict = {bill['application_id']: bill for bill in bills}
for a_id in apps_ids[start_index:end_index]:
bomber = average_gen(gen, existing_list)
q = DispatchApp.delete().where(DispatchApp.application == a_id
).execute()
dispatch_inserts.append({'application': a_id, 'bomber': bomber,
'partner': p.id})
application = Application.select().where(Application.id == a_id
).first()
application.latest_bomber = bomber
application.status = ApplicationStatus.AB_TEST.value
application.ptp_bomber = None
application.save()
day_next_cycle = cycle.get(application.cycle
) - application.overdue_days
DispatchAppHistory.create(application=a_id, partner_id=p.id,
bomber_id=bomber, entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - Decimal(
bill_dict[a_id].get('principal_paid')),
entry_late_fee_pending=Decimal(bill_dict[a_id].get(
'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')
), expected_out_time=date.today() + timedelta(days=
day_next_cycle))
start_index = end_index
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
config = SystemConfig.prefetch(SCI.AB_TEST_C2)
c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)
c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)
c2 = apps_ids[start_index:]
if c2:
bills = BillService().bill_list(application_ids=c2)
else:
bills = []
bill_dict = {bill['application_id']: bill for bill in bills}
    logging.info('c2 AB_test length: %s' % str(len(c2)))
gen = CycleIter(c2_bomber)
existing_list = []
for c in c2:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == c).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = 46 - application.overdue_days
DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at
=datetime.now(), entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - bill_dict[c].get(
'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(
'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),
expected_out_time=date.today() + timedelta(days=day_next_cycle))
ab_test_other()
def c1a_dispatch_app():
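    """Spread today's dpd1-entry cash-loan applications across each C1A
    partner's bombers in proportion to the partner's app_percentage."""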
today = datetime.today().date()
tomorrow = today + timedelta(days=1)
c1a_apps = Application.select().where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value], Application.dpd1_entry >= today, Application.dpd1_entry <
tomorrow, Application.type == ApplicationType.CASH_LOAN.value)
all_aids = [a.id for a in c1a_apps]
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C1A.value)
end = 0
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
is_del == 0)
start = end
end += int(len(all_aids) * p.app_percentage)
aids = all_aids[start:end]
bids = [b.id for b in bombers]
if not bids or not aids:
continue
average_number = get_average_number(len(aids), len(bids))
p_end = 0
for i, bid in enumerate(bids):
p_start = p_end
p_end += average_number[i]
b_aids = aids[p_start:p_end]
with db.atomic():
q = Application.update(latest_bomber=bid, status=
ApplicationStatus.AB_TEST.value).where(Application.id <<
b_aids).execute()
params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,
'application_ids': b_aids, 'dest_bomber_id': bid}
new_in_record(**params)
try:
dispatch_inserts = []
for aid in b_aids:
dispatch_inserts.append({'application': aid, 'bomber':
bid, 'partner': p.id, 'status': DisAppStatus.NORMAL
.value})
if dispatch_inserts:
q = DispatchApp.insert_many(dispatch_inserts).execute()
except Exception as e:
                    logging.error('c1a dispatch_app insert error:%s' % str(e))
def get_cash_bomber(bids, cycle):
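    """Keep only active bombers from ``bids`` that are not assigned to
    instalment collection for ``cycle``."""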
cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==
0, Bomber.instalment != cycle)
cash_bids = [b.id for b in cash_bombers]
return cash_bids
def out_and_in_record_instalment(**kwargs):
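    """Close any open dispatch history for the given applications, then
    insert entry rows for the destination bomber with pending amounts
    taken from the instalment sub-bills."""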
if not kwargs.get('application_ids'):
return
out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(
DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True)).execute()
    cycle_period = {1: '10', 2: '30', 3: '60', 4: '90'}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(kwargs['dest_bomber_id'])).alias(
'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.
alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).
alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << kwargs[
'application_ids'])
application_list = list(subquery)
for idx in range(0, len(application_list), 50):
applications = application_list[idx:idx + 50]
app_ids = [i.application_id for i in applications]
overdue_bills = OverdueBill.select().where(OverdueBill.
collection_id << app_ids)
sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
insert_args = lambad_instalment_result(bill_list, applications)
if not insert_args:
continue
DispatchAppHistory.insert_many(insert_args).execute()
def c1b_dispatch_in_record(**kwargs):
app_ids = kwargs.get('apps')
partner_id = kwargs.get('partner_id', 'null')
bill_dict = kwargs.get('bill_dict')
period = kwargs.get('period')
bomber_id = kwargs.get('bomber_id')
if not all([app_ids, partner_id, bill_dict, period]):
return False
bill_dict = {str(k): v for k, v in bill_dict.items()}
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().
alias('entry_at'), Application.overdue_days.alias(
'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.id << app_ids)
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
def check_call_history(application):
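    """Return True if the application is under 4 days old or had a
    connected outbound call within the last 5 days."""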
app_create_at = application.created_at + timedelta(days=4)
if datetime.today().date() > app_create_at.date():
call_actions = CallActions.select().where(CallActions.type == 0,
CallActions.application == application.id, CallActions.
created_at > datetime.now() - timedelta(days=5))
for call in call_actions:
if call.phone_status == PhoneStatus.CONNECTED.value:
return True
return False
return True
@action(MessageAction.BOMBER_SCAVENGER)
def scavenger(payload, msg_id):
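    """Reset stale PROCESSING/MAILBOX auto-call entries and stuck auto-IVR
    records back to a schedulable state; timeout minutes come from
    SystemConfig with hard-coded fallbacks."""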
scavenger_time = -60
scavenger = SystemConfig.select().where(SystemConfig.key ==
'SCAVENGER_TIME').first()
if scavenger and scavenger.value.isdigit():
scavenger_time = -int(scavenger.value)
update_auto_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value, description='scavenger').where(AutoCallList.status ==
AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime
.now() + timedelta(minutes=scavenger_time))
count = update_auto_call_list.execute()
    logging.info('scavenger processed %s applications', count)
mail_box_scavenger_time = -30
mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==
'MAIL_BOX_SCAVENGER_TIME').first()
if mail_box_scavenger and mail_box_scavenger.value.isdigit():
mail_box_scavenger_time = -int(mail_box_scavenger.value)
update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.
value, AutoCallList.updated_at < datetime.now() + timedelta(minutes
=mail_box_scavenger_time))
mail_box_count = update_mail_box_call_list.execute()
logging.info('scavenger update mail box %s', mail_box_count)
update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value
).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.
updated_at < datetime.now() + timedelta(minutes=-30))
ivr_result = update_auto_ivr.execute()
logging.info('scavenger update %s ivr' % ivr_result)
@action(MessageAction.REPORT_BOMBER_COLLECTION)
def report_bomber_collection(payload, msg_id):
start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)
).scalar()
now = datetime.now()
if start_date and str(start_date) == str(now)[:10]:
return
end_date = str(now + timedelta(days=1))[:10]
start_date = str(now)[:10]
dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))
all_overdue_loan_sql1 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
GROUP BY 1;
"""
s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()
d1 = OperatedDict(s_data1)
all_overdue_loan_sql2 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE DATE(ba.follow_up_date) > CURDATE()
AND ba.called_counts = 0
GROUP BY 1;
"""
s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()
d2 = OperatedDict(s_data2)
overdue_loans_entered_into_predict_call_system_sql = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE ba.called_counts >= 1
GROUP BY 1;
"""
s_data3 = readonly_db.execute_sql(
overdue_loans_entered_into_predict_call_system_sql).fetchall()
d3 = OperatedDict(s_data3)
loans_completed_sql = """
SELECT ba.cycle, COUNT(DISTINCT ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()
d4 = OperatedDict(s_data4)
connected_calls_automatic_sql = """
SELECT ba.cycle, COUNT(ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()
d5 = OperatedDict(s_data5)
connected_calls_manual_sql = """
SELECT bb.cycle, COUNT(bb.id)
FROM bomber.bombing_history bb
WHERE DATE(bb.created_at) = curdate()
AND (bb.bomber_id < 150 OR bb.bomber_id > 200)
GROUP BY bb.cycle;
"""
s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()
d6 = OperatedDict(s_data6)
logging.info('Directly get data from database successfully.')
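    # OperatedDict appears to wrap the per-cycle counts queried above so
    # the arithmetic below works element-wise per cycle; the resulting
    # ratios feed the report rows.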
c1 = d1 - d2
c2 = d3
c3 = c2 / c1
c4 = d4
c5 = c4 / c2
c6 = d5
c7 = c6 / c4
c8 = d6
c9 = OperatedDict(get_agent())
c10 = (c6 + c8) / c9
try:
c11 = average_call_duration_team(start_date, end_date)
except AttributeError:
c11 = {}
lst = []
for i in range(1, 5):
lst.append({'apply_date': start_date, 'cycle': dct[i],
'all_overdue_loan': c1.get(i, 0),
'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),
'of_overdue_loans_entered_into_predict_call_system': round(c3.
get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),
'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *
100, 1), 'connected_calls_automatic': c6.get(i, 0),
'connected_calls_automatic_completed_loans': round(c7.get(i, 0),
1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i,
0), 'average_calls_agent': round(c10.get(i, 0), 1),
'average_call_duration_team': round(c11.get(i, 0), 1)})
ReportCollection.insert_many(lst).execute()
logging.info('report_bomber_collection:Done!')
def new_out_record(**kwargs):
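    """Stamp out_at on the source bomber's dispatch history for these
    applications and clear their ptp_bomber (skipped for month dispatch)."""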
if not kwargs['application_ids']:
return
DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.
bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.
application << kwargs['application_ids'], DispatchAppHistory.out_at
.is_null(True)).execute()
if kwargs.get('month_dispatch'):
return
try:
Application.update(ptp_bomber=None).where(Application.id << kwargs[
'application_ids']).execute()
except Exception as e:
logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[
'application_ids'], str(e)))
def end_old_application(old_app, paid=False):
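    """Advance an old-loan record to PAID or FINISHED; return the
    application_id when its special collection should be closed out."""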
if paid:
if old_app.status == OldLoanStatus.WAITING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return old_app.application_id
end_date = old_app.end_date
now = datetime.now()
if now >= max(end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
old_app.save()
return old_app.application_id
def start_old_application(old_app, cancel=False):
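    """Start (or, on cancel, roll back) special collection on an old loan:
    assign it to the dedicated old-app bomber and open a dispatch history
    record for the configured period."""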
application_id = old_app.application_id
if cancel and old_app.status == OldLoanStatus.PAID.value:
now = datetime.now()
if old_app.start_date is None:
old_app.status = OldLoanStatus.WAITING.value
elif now >= max(old_app.end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.
promised_date or now)).where(DispatchAppHistory.bomber_id ==
old_app.bomber_id, DispatchAppHistory.application ==
application_id).execute()
else:
old_app.status = OldLoanStatus.PROCESSING.value
DispatchAppHistory.update(out_at=None).where(DispatchAppHistory
.bomber_id == old_app.bomber_id, DispatchAppHistory.
application == application_id).execute()
old_app.save()
return
application = Application.get_or_none(Application.id == application_id,
Application.status != ApplicationStatus.REPAID.value, Application.
overdue_days > 90, Application.promised_date.is_null(True) | (fn.
DATE(Application.promised_date) < datetime.today().date()))
if not application:
logging.error('Can not set old application %s to start collecting',
application_id)
return
if old_app.status in OldLoanStatus.no_available():
logging.info('%s has finished or paid', old_app.application_id)
return
config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)
sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)
old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value
old_app.status = OldLoanStatus.PROCESSING.value
old_app.bomber_id = old_app_bomber
old_app.start_date = datetime.now()
if not old_app.end_date:
old_app.end_date = datetime.now() + timedelta(days=sp)
old_app.save()
in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,
application_ids=[old_app.application_id], expected_out_time=str(
old_app.end_date))
def run_member_sql(sql):
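    """Execute raw SQL on the read-only DB and return the first row,
    falling back to [0, 0] on error or no result."""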
result = [0, 0]
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
sql_result = cursor.fetchone()
if sql_result:
result = sql_result
except Exception as e:
        logging.info('run sql error: %s, sql: %s' % (str(e), str(sql)))
return result
def get_before_bomber(date_time):
begin_time = str(date_time - timedelta(days=7))
end_time = str(date_time)
old_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at<date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (begin_time, begin_time))
old_data = run_one_sql(old_sql)
new_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at> '%s'
and created_at<'%s'
and overdue_days=1;
"""
% (begin_time, end_time))
new_data = run_one_sql(new_sql)
dpd4_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>date_add('%s',interval 1 day)
and created_at< date_add('%s',interval 1 day)
and overdue_days=4;
"""
% (begin_time, end_time))
dpd4_data = run_one_sql(dpd4_sql)
dpd2_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at< date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (end_time, end_time))
dpd2_data = run_one_sql(dpd2_sql)
all_money = old_data + new_data
repayment = all_money - dpd4_data - dpd2_data
pro = 0
if all_money:
pro = repayment / all_money * 100
RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,
proportion=pro, repayment=repayment)
def get_c1a_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=19)
repayment_sql = (
"""
select
sum(b.principal_part+b.late_fee_part) as paid_amount,
cdt
from
(select
br.principal_part, br.late_fee_part,
date(cdt) as cdt, br.repay_at, br.application_id
from (
select ba.id, ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
if not repayment:
return
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
for d in repayment:
repay = d[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
d[1], RepaymentReportInto.cycle == Cycle.C1A.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = round(repay, 3)
pro = repay / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_c2_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
not_contain_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=37)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
not_contain_repay = run_all_sql(not_contain_repay_sql)
if not not_contain_money and repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(
not_contain_money, 3), proportion='0', repayment=0,
is_first_loan=is_first_loan, contain_out=ContainOut.
NOT_CONTAIN.value)
for repay in not_contain_repay:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(all_money, 3),
proportion='0', repayment=0, is_first_loan=
is_first_loan, contain_out=ContainOut.CONTAIN.value)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_c3_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id, ba.c3_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=30)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c3_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c3_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = '%s'
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 4
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C3.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
if not repayment:
return
for repay in repayment:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
repay[1], RepaymentReportInto.cycle == Cycle.C3.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = repay_money
pro = 0
if report.all_money and int(report.all_money):
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_static_bomber(begin_date):
active_date = begin_date - timedelta(days=8)
bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),
BomberR.last_active_at.alias('active')).where(BomberR.
last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])
summary = []
for bomber in bombers:
summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':
bomber.role.cycle, 'work_ind': 0})
SummaryBomber.insert_many(summary).execute()
@time_logger
def get_call_and_made(end_date, begin_date, real_time_query=False):
call_sql = (
"""
select
bomber_id,
count(case when relationship is not null then application_id end)
as 'call_cnt',
count(distinct case when relationship is not null then
application_id end) as 'call_case',
count(case when phone_status=4 then application_id end) as 'connect',
count(distinct case when phone_status=4 then application_id end)
as 'connect_case'
from (
select bomber_id,application_id,phone_status, cycle, relationship
from bomber.call_actions ba
where created_at>'%s' and created_at<'%s'
and type in (0, 1)
) a
group by 1
"""
% (begin_date, end_date))
calls = run_all_sql(call_sql)
if real_time_query:
return calls
for call in calls:
bomber, call_cnt, case_made, connect_cnt, case_connect = call
SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,
call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(
SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date
).execute()
return calls
@time_logger
def get_claimed_cnt(end_date, begin_date, real_time_query=False):
table_date = begin_date - timedelta(days=30)
claimed_sql = (
"""
SELECT
COUNT( `t1`.`application_id` ) AS cnt,
`t1`.`bomber_id` AS bomber_id
FROM
`dispatch_app_history` AS t1
WHERE
( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null )
AND ( `t1`.`bomber_id` != 1000 )
AND ( `t1`.`partner_id` IS null )
AND ( `t1`.`entry_at` > '%s' )
AND ( `t1`.`entry_at` < '%s' )
GROUP BY
`t1`.`bomber_id`
"""
% (begin_date, table_date, end_date))
claimeds = run_all_sql(claimed_sql)
if real_time_query:
return claimeds
for claimed in claimeds:
cnt, bomber_id = claimed
SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == bomber_id).execute()
return claimeds
def get_sms_data(end_data, begin_data):
all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(
'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')
).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.
created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())
).group_by(ConnectHistoryR.operator)
for sms in all_sms:
SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==
begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()
return all_sms
@action(MessageAction.SUMMARY_NEW)
def summary_new(payload, msg_id):
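    """Daily SummaryBomber rollup for yesterday (calls, claims, sms, ptp,
    recovery, follow-ups); WorkerLog guards against running more than
    five times a day."""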
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_active_bomber(begin_date)
get_call_and_made(end_date, begin_date)
get_claimed_cnt(end_date, begin_date)
get_sms_data(end_date, begin_date)
get_ptp_data(end_date, begin_date)
get_recover_amount(end_date, begin_date)
get_unfollowed(begin_date)
get_unfollowed_call(begin_date)
def get_new_case_amount(begin_date, end_date):
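    """Sum pending principal + late fee and count the in-house (no
    partner) cases that entered each bomber's queue in the window,
    stored as new_case_amount_sum / new_case_cnt.
    """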
all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.
entry_late_fee_pending + DispatchAppHistoryR.
entry_principal_pending).alias('pending'), DispatchAppHistoryR.
bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')
).where(DispatchAppHistoryR.entry_at > begin_date,
DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.
partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)
for case in all_case:
SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt
=case.cnt).where(SummaryBomber.bomber_id == case.bomber_id,
SummaryBomber.time == begin_date).execute()
return all_case
def get_new_case_cleared(begin_date, end_date):
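    """Per ptp_bomber, sum repayments logged on the same day an
    application entered C1B, C2 or C3 (the three UNION branches), i.e.
    the money cleared from brand-new cases.
    """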
sql = (
"""
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c2_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c3_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
) a
GROUP BY 1
"""
% (begin_date, end_date, begin_date, end_date, begin_date, end_date))
case_cleared_sums = run_all_sql(sql)
for clear in case_cleared_sums:
SummaryBomber.update(new_case_cleared_sum=clear[1]).where(
SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==
clear[0]).execute()
@time_logger
def get_new_case_call(begin_date, end_date, real_query_time=False):
sql = (
"""
SELECT
bd.bomber_id,
count( DISTINCT bd.application_id )
FROM
bomber.dispatch_app_history bd
INNER JOIN bomber.call_actions bc
ON bd.application_id = bc.application_id
AND bd.bomber_id = bc.bomber_id
AND date( bd.entry_at ) = date( bc.created_at )
WHERE
entry_at > '%s'
AND entry_at < '%s'
AND partner_id IS NULL
GROUP BY 1
"""
% (begin_date, end_date))
new_case_calls = run_all_sql(sql)
if real_query_time:
return new_case_calls
for call in new_case_calls:
SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber
.bomber_id == call[0], SummaryBomber.time == begin_date).execute()
return new_case_calls
@time_logger
def get_calltime_avg(begin_date, end_date, real_query_time=False):
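    """Average talk time per bomber: ANSWERED auto-dial calls (matched
    by username) plus ANSWERED manual calls with a recording (matched
    by extension, prefixes 5/3). Writes sum, count and average to
    SummaryBomber unless real_query_time is set.
    """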
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction,
count( 1 ) AS auto_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
AND an.recording is not null
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
        if data[0] in result:
result[data[0]][0] += data[1]
result[data[0]][1] += data[2]
continue
result[data[0]] = [data[1], data[2]]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=
value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == key).execute()
return result
def get_no_calltime_avg(begin_date, end_date):
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND (an.status!='ANSWERED' or an.recording is null)
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
for data in manuals:
SummaryBomber.update(calltime_no_case_sum=data[1],
calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /
data[2] if data[2] else 0).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == data[0]).execute()
@time_logger
def get_calltime_sum(begin_date, end_date, real_query_time=False):
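    """Total talk time per bomber over the window, auto plus manual
    calls; note there is no ANSWERED status filter here, unlike
    get_calltime_avg.
    """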
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
        if data[0] in result:
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == key).execute()
return result
@action(MessageAction.UPDATE_SUMMARY_NEW)
def update_summary_new(payload, msg_id):
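    """Second daily pass filling the remaining SummaryBomber columns
    (new-case amounts, PTP stats, call-time stats) for yesterday,
    guarded by the same five-runs-per-day WorkerLog check as
    summary_new.
    """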
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'UPDATE_SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_new_case_amount(begin_date, end_date)
get_kp_cleared(begin_date, end_date)
get_kp_today(begin_date, end_date)
get_ptp_cnt(begin_date, end_date)
get_ptp_call_cnt(begin_date, end_date)
get_new_case_cleared(begin_date, end_date)
get_new_case_call(begin_date, end_date)
get_calltime_avg(begin_date, end_date)
get_no_calltime_avg(begin_date, end_date)
get_calltime_sum(begin_date, end_date)
def get_cycle_claimed(begin_date, end_date):
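    """Count open applications per cycle: not finished, or finished
    after begin_date, created since 2018-09-01. Returns the raw rows
    for the caller to aggregate.
    """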
sql = (
"""
select cycle,count(1)
from bomber.application where cycle in (1,2,3,4)
and (finished_at is null or (finished_at>'%s'))
and created_at>'2018-09-01'
group by 1
"""
% begin_date)
result = run_all_sql(sql)
return result
@time_logger
def cycle_new_case(begin_date, end_date, real_time_query=False):
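    """Cycle-level new-case stats, one UNION branch per cycle (C1A by
    created_at, C1B/C2/C3 by their entry timestamps). Results go to
    the SummaryBomber rows whose bomber_id equals the cycle number,
    which appear to serve as cycle totals.
    """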
sql = (
"""
SELECT
1 AS cycle,
count( ba.id ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.created_at ) = bo.which_day_overdue
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c1b_entry ) = bo.which_day_overdue
WHERE
c1b_entry > '%s'
AND c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c2_entry ) = bo.which_day_overdue
WHERE
c2_entry > '%s'
AND c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c3_entry ) = bo.which_day_overdue
WHERE
c3_entry > '%s'
AND c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
all_datas = run_all_sql(sql)
if real_time_query:
return all_datas
for data in all_datas:
SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()
return all_datas
@time_logger
def get_cycle_new_case_call(begin_date, end_date, real_time_query=False):
sql = (
"""
SELECT
1 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.created_at ) = date( bc.created_at )
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c1b_entry ) = date( bc.created_at )
WHERE
ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c2_entry ) = date( bc.created_at )
WHERE
ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c3_entry ) = date( bc.created_at )
WHERE
ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
cycle_datas = run_all_sql(sql)
if real_time_query:
return cycle_datas
for data in cycle_datas:
SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber
.time == begin_date, SummaryBomber.cycle == data[0],
SummaryBomber.bomber_id == data[0]).execute()
return cycle_datas
def get_cycle_new_case_cleared(begin_date, end_date):
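    """Cycle-level cleared sums for new cases. Note only the C1A
    (created_at) and C1B (c1b_entry) branches exist here; C2/C3 are
    apparently not tracked at cycle level.
    """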
sql = (
"""
SELECT
'1' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.created_at ) = date( br.repay_at )
WHERE ba.created_at > '%s'
AND ba.created_at < '%s'
GROUP BY 1, 2 ) a
UNION
SELECT
'2' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
GROUP BY 1, 2) a
"""
% (begin_date, end_date, begin_date, end_date))
cycle_cleared = run_all_sql(sql)
for i in cycle_cleared:
SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber
.cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.
time == begin_date).execute()
def get_change_bomber():
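    """From today's BomberLog entries for the C1B/C2/C3 roles, build per
    cycle and group the removed bombers (operation 0 -> del_ids) and
    the surviving bombers (new_ids) whose cases must be redistributed.
    """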
    cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value,
        8: Cycle.C3.value}
result = {}
bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,
BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=
BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.
created_at) == date.today(), BomberLog.role_id << list(
cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.
instalment == 0).dicts()
for b_log in bomber_logs:
cycle = cycle_role_map.get(b_log['role_id'])
group_id = b_log['group_id']
if cycle in result:
if group_id not in result[cycle]:
result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],
'new_ids': []}
else:
result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],
'new_ids': []}}
if b_log['operation'] == 0:
result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])
if result:
bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map
.keys())), Bomber.is_del == 0, Bomber.instalment == 0)
for b in bombers:
cycle_result = result.get(cycle_role_map[b.role_id], {})
role_result = cycle_result.get(b.group_id)
if not role_result:
continue
role_result['new_ids'].append(b.id)
        result_list = []
        for cycle, group_dict in result.items():
            result_list.extend(list(group_dict.values()))
        return result_list
return []
def get_surplus_application(new_ids, del_ids, average_nums, classified_apps):
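    """Collect the applications that must move: everything owned by
    deleted bombers, plus the overflow (randomly chosen non-PTP cases)
    of any remaining bomber above its average quota. Returns the
    surplus apps and the bombers sorted by remaining need.
    """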
surplus_apps = []
for del_id in del_ids:
del_res = classified_apps.get(del_id, {})
p_list = del_res.get('p_list', [])
np_list = del_res.get('np_list', [])
del_res['need_num'] = -(len(p_list) + len(np_list))
del_res['to_list'] = np_list + p_list
surplus_apps.extend(p_list)
surplus_apps.extend(np_list)
for index, bid in enumerate(new_ids):
average = average_nums[index]
bomber_app = classified_apps.get(bid)
if not bomber_app:
bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid
).first()
bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':
[], 'np_num': 0, 'need_num': average, 'partner_id': bomber.
partner_id if bomber else ''}
classified_apps[bid] = bomber_app
else:
p_num = len(bomber_app['p_list'])
np_num = len(bomber_app['np_list'])
if p_num > average:
bomber_app['need_num'] = -np_num
else:
bomber_app['need_num'] = average - (p_num + np_num)
bomber_app['p_num'] = p_num
bomber_app['np_num'] = np_num
if bomber_app['need_num'] < 0:
random.shuffle(bomber_app['np_list'])
res_over = bomber_app['np_list'][:-bomber_app['need_num']]
bomber_app['to_list'] = res_over
surplus_apps.extend(res_over)
classified_apps_list = sorted(classified_apps.values(), key=lambda x: x
['need_num'], reverse=True)
return surplus_apps, classified_apps_list
@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)
def bomber_dispatch_applications(payload, msg_id):
change_bombers = get_change_bomber()
instalment_change_bombers = get_instalment_change_bomber()
params = {ApplicationType.CASH_LOAN.value: change_bombers,
ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}
for type, bombers in params.items():
change_bomber_dispatch_apps(change_bombers=bombers, type=type)
def get_summary_daily_time():
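    """Return (begin_time, end_time, summary_date) of the most recently
    closed intra-day window. The day is cut at 12:40 and 17:20, giving
    windows 17:20-24:00 (previous day), 00:00-12:40 and 12:40-17:20;
    summary_date is taken 30 minutes in the past.
    """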
mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')
mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')
now_date = datetime.now()
now_date_time = now_date.time()
today_str = str(now_date.date())
if now_date_time < mid_time_t1.time():
yes_date = now_date - timedelta(days=1)
yes_date_str = str(yes_date.date())
begin_str = yes_date_str + ' 17:20:00'
end_str = today_str + ' 00:00:00'
elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():
begin_str = today_str + ' 00:00:00'
end_str = today_str + ' 12:40:00'
else:
begin_str = today_str + ' 12:40:00'
end_str = today_str + ' 17:20:00'
begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')
end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')
summary_datetime = now_date - timedelta(minutes=30)
summary_date = summary_datetime.date()
return begin_time, end_time, summary_date
@action(MessageAction.SUMMARY_DAILY)
def summary_daily_data(payload, msg_id):
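    """Build intra-day SummaryDaily rows for the window from
    get_summary_daily_time(): call/PTP counts from CallActions plus
    repayment sums from two SQL sources (C1 roles 1/2/4/5, and
    PTP-backed repayments for roles 3/6/7/8/9).
    """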
begin_time, end_time, summary_date = get_summary_daily_time()
call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.
bomber_id, CallActionsR.application_id, CallActionsR.promised_date,
CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(
CallActionsR.created_at >= begin_time, CallActionsR.created_at <
end_time, CallActionsR.type << (0, 1))
summary_dailys = {}
for call in call_actions:
if call.bomber_id not in summary_dailys:
summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.
bomber_id, 'summary_date': str(summary_date)}
if call.name and call.number:
summary_dailys[call.bomber_id]['call_cnt'] += 1
if call.promised_date:
summary_dailys[call.bomber_id]['ptp_cnt'] += 1
C1_sql = (
"""
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,a.cycle
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,2,4,5)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
"""
% (begin_time, end_time))
C1_repayment = run_all_sql(C1_sql)
other_sql = (
"""
select current_bomber_id,sum(pay_amount) as pay_amount,cycle
from (
select application_id,current_bomber_id,pay_amount,repay_at,cycle
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at,
br.cycle
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (3,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
"""
% (begin_time, end_time))
other_repayment = run_all_sql(other_sql)
all_repayment = C1_repayment + other_repayment
for res in all_repayment:
bomber_id, pay_amount, cycle = res
if bomber_id in summary_dailys:
summary_dailys[bomber_id]['repayment'] += pay_amount
else:
summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': cycle, 'repayment': pay_amount, 'bomber_id':
bomber_id, 'summary_date': str(summary_date)}
insert_values = list(summary_dailys.values())
if insert_values:
SummaryDaily.insert_many(insert_values).execute()
def get_app_logs(apps):
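    """Split apps by current owner into PTP cases (promised_date still
    current) and non-PTP cases, returning the per-bomber log skeleton
    plus the two flat lists.
    """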
app_logs = {}
all_np_apps = []
all_p_apps = []
for a in apps:
latest_bomber = a['latest_bomber']
latest_bomber = a['cycle'] if not latest_bomber else latest_bomber
if latest_bomber in app_logs:
app_logs[latest_bomber]['to_ids'].append(a['id'])
else:
app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':
[a['id']], 'np_ids': [], 'p_ids': []}
if a['promised_date'] and a['promised_date'].date() >= datetime.now(
).date():
app_logs[latest_bomber]['p_ids'].append(a['id'])
all_p_apps.append(a)
else:
app_logs[latest_bomber]['np_ids'].append(a['id'])
all_np_apps.append(a)
return app_logs, all_np_apps, all_p_apps
def month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):
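    """Monthly re-dispatch, partner leg: shuffle the non-PTP apps and
    give each partner a slice proportional to its app_percentage
    (scaled down when non-PTP apps are scarce); the untouched tail is
    returned for the in-house leg.
    """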
apps = list(apps)
np_apps = list(np_apps)
random.shuffle(np_apps)
apps_len = len(apps)
np_apps_len = len(np_apps)
end = 0
all_app_precentage = 0
partners = Partner.select().where(Partner.cycle == cycle, Partner.
status == PartnerStatus.NORMAL.value)
for p in partners:
all_app_precentage += p.app_percentage
for partner in partners:
bombers = Bomber.select().where(Bomber.partner == partner.id,
Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.
value)
bids = {b.id: b for b in bombers}
if len(bids) == 0:
logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))
continue
start = end
if np_apps_len >= int(apps_len * all_app_precentage):
end = start + int(apps_len * partner.app_percentage)
else:
end = start + int(np_apps_len * partner.app_percentage /
all_app_precentage)
        partner_app = np_apps[start:end]
        dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)
    return np_apps[end:]
def month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):
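    """Monthly re-dispatch, in-house leg: AB-test bomber ids come from
    SystemConfig per cycle. C1A cases are parked on the cycle itself as
    PROCESSING; other cycles are spread over the bombers via
    dispatch_apps_to_bomber.
    """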
    sys_cycle = {1: 'AB_TEST_C1A', 2: 'AB_TEST_C1B', 3: 'AB_TEST_C2',
        4: 'AB_TEST_C3'}
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0
)
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
bombers = bombers.where(Bomber.instalment == 0)
bids = {b.id: b for b in bombers}
if cycle == Cycle.C1A.value:
np_ids = [a['id'] for a in np_apps]
np = Application.update(status=ApplicationStatus.PROCESSING.value,
ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids
).execute()
bomber_app_logs = app_logs.get(cycle, {})
out_param = {'application_ids': bomber_app_logs.get('to_ids', []),
'month_dispatch': 1, 'src_bomber_id': cycle}
new_out_record(**out_param)
in_param = {'cycle': cycle, 'application_ids': np_ids,
'dest_bomber_id': cycle}
new_in_record(**in_param)
bomber_app_logs['need_num'] = len(np_apps)
bomber_app_logs['form_ids'] = np_ids
bomber_app_logs['status'] = 1
else:
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)
dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)
def dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,
type=ApplicationType.CASH_LOAN.value):
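    """Shuffle apps and split them evenly across bids (via
    get_average_number), updating ownership and PTP status, writing the
    per-bomber in/out dispatch records, and, when out_partner is True,
    rewriting the DispatchApp rows for the partner.
    """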
apps = list(apps)
random.shuffle(apps)
bids_list = list(bids.keys())
if len(bids_list) <= 0:
logging.info('get_dispatch_app_to_bomber no bids')
return
average_num = get_average_number(len(apps), len(bids_list))
bomber_end = 0
with db.atomic():
for index, bid in enumerate(bids_list):
current_bomber = bids.get(bid)
bomber_app_logs = app_logs.get(bid, {})
bomber_start = bomber_end
bomber_end = bomber_start + average_num[index]
bomber_apps = apps[bomber_start:bomber_end]
from_p, from_np, from_ids, status = [], [], [], 0
for ba in bomber_apps:
promised_date = ba.get('promised_date')
from_ids.append(ba['id'])
if promised_date and promised_date.date() >= date.today():
from_p.append(ba['id'])
else:
from_np.append(ba['id'])
app_status = ApplicationStatus.AB_TEST.value
if (cycle == Cycle.C1A.value and not out_partner and type ==
ApplicationType.CASH_LOAN.value):
app_status = ApplicationStatus.PROCESSING.value
if from_p:
p = Application.update(ptp_bomber=bid, latest_bomber=bid,
status=app_status).where(Application.id << from_p).execute(
)
p_ids = bomber_app_logs.get('p_ids', []) + from_p
bomber_app_logs['p_ids'] = p_ids
if from_np:
np = Application.update(latest_bomber=bid, ptp_bomber=None,
status=ApplicationStatus.AB_TEST.value).where(
Application.id << from_np).execute()
np_ids = bomber_app_logs.get('np_ids', []) + from_np
bomber_app_logs['np_ids'] = np_ids
in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.
partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}
if type == ApplicationType.CASH_LOAN.value:
out_param = {'src_bomber_id': bid, 'application_ids':
bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}
new_out_record(**out_param)
new_in_record(**in_param)
else:
out_and_in_record_instalment(**in_param)
bomber_app_logs['status'] = 1
need_num = bomber_app_logs.get('need_num', 0) + average_num[index]
bomber_app_logs['need_num'] = need_num
all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids
bomber_app_logs['form_ids'] = all_form_ids
if not out_partner:
continue
try:
DispatchApp.delete().where(DispatchApp.application.in_(
from_ids)).execute()
dispatch_ins = [{'application': id, 'partner':
current_bomber.partner_id, 'bomber': bid, 'status':
DisAppStatus.NORMAL.value} for id in from_ids]
DispatchApp.insert_many(dispatch_ins).execute()
except Exception as e:
logging.info(
'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str
(e), bid, from_ids))
def calc_entry_time(overdue_days):
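    """Map overdue_days onto cycle entry timestamps: only the bucket
    containing overdue_days gets datetime.now(), the rest are None.
    Illustrative call: calc_entry_time(5) sets C1A_entry (range 4-10)
    and leaves the dpd1/C1B/C2/C3 entries as None.
    """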
app_entry_time = {}
overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],
'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}
for key, value in overdue_entry.items():
if value[0] <= overdue_days <= value[1]:
app_entry_time[key] = datetime.now()
else:
app_entry_time[key] = None
return app_entry_time
@action(MessageAction.SUMMARY_BOMBER_OVERDUE)
def summary_bomber_overdue_everyday(payload, msg_id):
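    """Snapshot every active application (joined with its overdue bills)
    per cycle into BomberOverdue for today, blanking PTP/follow-up
    fields already in the past; rows are inserted in chunks of 1000
    inside a transaction.
    """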
cycle_list = Cycle.values()
which_day = date.today()
for cycle in cycle_list:
apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,
ApplicationR.ptp_bomber, ApplicationR.overdue_days,
ApplicationR.promised_date, ApplicationR.follow_up_date,
ApplicationR.external_id, OverdueBillR.status, OverdueBillR.
periods, OverdueBillR.sub_bill_id).join(OverdueBillR,
JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id
).where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()
bomber_overdue_list = []
for app in apps:
status = app.get('status')
if status == ApplicationStatus.REPAID.value:
continue
ptp_bomber = app.get('ptp_bomber')
promised_date = app.get('promised_date')
follow_up_date = app.get('follow_up_date')
if not promised_date or promised_date.date() < date.today():
ptp_bomber = promised_date = None
if not follow_up_date or follow_up_date.date() < date.today():
follow_up_date = None
overdue_dict = {'collection_id': app.get('id'), 'external_id':
app.get('external_id'), 'sub_bill_id': app.get(
'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app
.get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':
ptp_bomber, 'promised_date': promised_date,
'follow_up_date': follow_up_date, 'which_day': which_day,
'overdue_days': app.get('overdue_days')}
bomber_overdue_list.append(overdue_dict)
        try:
            if bomber_overdue_list:
                with db.atomic():
                    for index in range(0, len(bomber_overdue_list), 1000):
                        insert_list = bomber_overdue_list[index:index + 1000]
                        BomberOverdue.insert_many(insert_list).execute()
        except Exception as e:
            logging.info(
                'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s'
                % (cycle, str(which_day), str(e)))
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)
def today_ptp_auto_call_switch(payload, msg_id):
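    """Switch auto-calling OFF for every C1 bomber who has at least one
    PTP promised for today, presumably so the dialer does not call
    cases the bomber should follow up manually.
    """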
today = datetime.today().date()
next_day = today + timedelta(days=1)
apps = ApplicationR.select(ApplicationR.latest_bomber).where(
ApplicationR.promised_date < next_day, ApplicationR.promised_date >=
today, ApplicationR.promised_date.is_null(False), ApplicationR.
status != ApplicationStatus.REPAID.value, ApplicationR.cycle <
Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(
ApplicationR.latest_bomber)
bids = [a.latest_bomber_id for a in apps]
if not bids:
return
q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(
BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids
).execute()
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)
def update_today_switch_every_day(payload, msg_id):
q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(
BomberPtp.auto_ext.is_null(False)).execute()
@action(MessageAction.BOMBER_HEALTH_CHECK)
def health_check(payload, msg_id):
pass
def dpd1_process(lst):
"""已废弃的方法"""
if not lst:
return
for key, l in lst.items():
rule = getattr(BeforeInBomber, key).value
query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)
).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(
rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.
call_success()))
success_set = {i.loanid for i in query}
failed_list = list(set(l) - success_set)
post_params = {'$and': rule.get('$and'), 'app_list': failed_list}
resp = Hyperloop().post('/bomber/score/verify', json=post_params)
if not resp.ok:
logging.error('hyperloop score verification failed: %s, %s',
str(resp.status_code), str(resp.text))
logging.error('hyperloop score verification failed: %s', str(
post_params))
continue
logging.debug('hyperloop score verification success: %s', resp.content)
resp_json = resp.json()
app_list = resp_json['data']
if not app_list:
continue
for item in app_list:
if random.randint(0, 5) == 1:
send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':
int(item)})
@action(MessageAction.GET_IVR)
def get_ivr(payload, msg_id):
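    """Page through the bill service's IVR candidates (due within the
    next 4 days, or reaching 3 days back when DPD1-3_INTO_IVR is on),
    map app name + 'su' flag + days-to-due onto an IVR group id, and
    bulk-insert AutoIVR rows, tracking progress in IVRActionLog.
    """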
logging.warning('start get_ivr')
sys_config = SystemConfig.select().where(SystemConfig.key ==
'DPD1-3_INTO_IVR').first()
now = date.today()
if sys_config and sys_config.value:
start = now - timedelta(days=3)
else:
start = now
end = now + timedelta(days=4)
item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()
if not item:
AutoIVR.delete().execute()
current_page = 0
elif item.current_page >= item.total_page:
return
else:
current_page = item.current_page
auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,
'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,
'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':
10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,
'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,
'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,
'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,
'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,
'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,
'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,
'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,
'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,
'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,
'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':
101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,
'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,
'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,
'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}
current_page += 1
with db.atomic() as transaction:
while True:
bill_service = BillService()
ivr_action = bill_service.ivr_pages(page=current_page,
page_size=500, start_time=utc_datetime(str(start)),
end_time=utc_datetime(str(end)))
result = ivr_action['result']
page_size = int(ivr_action.get('page_size', 0))
total_page = int(ivr_action.get('total_page', 0))
insert_args = []
for a in result:
due_at = no_utc_datetime(a['due_at'])
days = (due_at.date() - now).days
if days == 2:
continue
if days > 0:
time = str(days)
else:
time = str(days).replace('-', 'PDP')
key = a['app_name'] + str(a['su']) + time
group = auto_ivr.get(key)
user_id = a['user_id']
try:
user_resp = AccountService().get_user(path_params={
'user_id': user_id})
if str(user_resp['mobile_no']) == str(a['user_mobile_no']):
numbers = a['user_mobile_no']
else:
numbers = a['user_mobile_no'] + ',' + user_resp.get(
'mobile_no')
                except Exception:
logging.error('Request Account Service Error.')
numbers = a['user_mobile_no']
insert_args.append({'application_id': a['id'], 'numbers':
numbers, 'group': group, 'user_id': user_id})
AutoIVR.insert_many(insert_args).execute()
if current_page == 1:
IVRActionLog.create(total_page=total_page, proc_date=now,
page_size=page_size, current_page=current_page)
item = IVRActionLog.get(IVRActionLog.proc_date == now)
else:
item.current_page = current_page
item.page_size = page_size
item.total_page = total_page
item.save()
transaction.commit()
current_page += 1
if current_page > int(total_page):
break
if sys_config and sys_config.value:
try:
classfiy_dpd_ptp_apps()
except Exception as e:
logging.error('dpd1-3_test_error:%s' % str(e))
def ivr_t2_test():
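    """Apparently an A/B test on the T2 IVR groups (39-44): keep only a
    configurable share (IVR_TEST_PROPORTION, default 0.2) available and
    mark the rest SUCCESS so they are skipped.
    """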
t2_groups = [39, 40, 41, 42, 43, 44]
ivr_test_proportion = 0.2
sys_config = SystemConfig.select().where(SystemConfig.key ==
'IVR_TEST_PROPORTION').first()
if sys_config and sys_config.value:
ivr_test_proportion = float(sys_config.value)
t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.
status == AutoIVRStatus.AVAILABLE.value)
t2_dict = defaultdict(list)
for ivr in t2_ivrs:
t2_dict[ivr.group].append(ivr.id)
test_ivr_ids = []
for group, ivr_ids in t2_dict.items():
number = ceil(len(ivr_ids) * ivr_test_proportion)
test_ivr_ids += ivr_ids[:number]
if not test_ivr_ids:
return
q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.
group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()
@action(MessageAction.APP_MERGE)
@deprecated(version='1.0', reason='This function will be removed soon')
def app_merge(payload, msg_id):
sql = """
select *
from (
select a.id as id
from dashboard.application as a
inner join repayment.bill2 as b on b.external_id = a.id
where not exists (
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.app = 'DanaCepat'
and a.is_first_loan = 1
and a.apply_at < '2018-08-23 20:50:00'
and b.overdue_days between 1 and 3
and b.status != 2) result
where not exists (
select 1
from bomber.application as a
where a.cycle = 1
and a.status = 4
and a.id = result.id
)
"""
cursor = readonly_db.get_cursor()
cursor.execute(sql)
new_data = cursor.fetchall()
cursor.close()
if new_data:
bomber = [103, 104]
for d in new_data:
app_id = {'id': d[0]}
application_overdue(app_id, None)
Application.update(status=ApplicationStatus.AB_TEST.value,
latest_bomber=random.choice(bomber), ptp_bomber=None).where(
Application.id == d[0]).execute()
logging.warning('add new app success')
ptp = date.today() - timedelta(days=1)
del_sql = (
"""
select a.id
from bomber.application as a
where exists(
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.cycle = 1
and a.status = 4
and (a.promised_date is null or a.promised_date < "%s")
"""
% ptp)
cursor = readonly_db.get_cursor()
cursor.execute(del_sql)
del_date = cursor.fetchall()
cursor.close()
    if not del_date:
        return
ids = list()
for d in del_date:
ids.append(d[0])
Application.update(status=ApplicationStatus.UNCLAIMED.value,
latest_bomber=None).where(Application.id << ids).execute()
@action(MessageAction.APPLICATION_BOMBER)
def application_overdue(payload, msg_id):
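    """Entry point for a newly overdue (sub-)bill: skip if already
    tracked, pull bill and user details from the upstream services,
    create the Application (a fresh id for instalment loans) and its
    OverdueBill, open an automatic escalation, and import contacts.
    """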
application_id = payload['id']
sub_bill_id = payload['bill_sub_id']
local_app = Application.select().where(Application.external_id ==
application_id).order_by(Application.finished_at).first()
if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:
logging.info('application %s overdue, already exists', application_id)
add_contact(local_app)
return
if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:
overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==
sub_bill_id, OverdueBillR.external_id == application_id)
if overdue_bill.exists():
logging.info(
'application %s,sub_bill_id %s overdue, already exists' % (
application_id, sub_bill_id))
return
try:
sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
sub_bill = sub_bill[0]
except Exception:
logging.error(
'application %s overdue, get sub_bill info failed:Request To repayment Error'
, application_id)
return
if sub_bill['status'] == 2:
logging.error('application %s overdue, but bills already cleared',
application_id)
return
overdue_days = sub_bill.get('overdue_days', 0)
if overdue_days == 0:
logging.info('application {} no overdue'.format(str(application_id)))
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
logging.error('get application %s failed: Request to GoldenEye.',
application_id)
return
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
apply_history = Dashboard().get('/users/%s/apply-history' % user_id)
if not apply_history.ok:
logging.error(
'get user %s apply history failed: Request to Dashboard Failed.',
user_id)
return
history = apply_history.json().get('data')
    loan_success_times = len([1 for i in history if i['status'] in [80,
        90, 100, 70] and i['id'] != gold_app['id']])
id = application_id
type = ApplicationType.CASH_LOAN.value
bill_id = sub_bill.get('bill_id')
amount = sub_bill.get('amount')
amount_net = sub_bill.get('amount_net')
interest_rate = sub_bill.get('interest_rate')
overdue_days = sub_bill.get('overdue_days')
origin_due_at = sub_bill.get('origin_due_at')
sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,
'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),
'overdue_days': overdue_days, 'origin_due_at': origin_due_at,
'amount': amount, 'amount_net': amount_net, 'interest_rate':
interest_rate, 'external_id': application_id}
if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:
if local_app and local_app.status != ApplicationStatus.REPAID.value:
sub_overdue_bill['collection_id'] = local_app.id
local_app.amount += amount
local_app.amount_net += amount_net
local_app.save()
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('application %s,sub_bill_id:%s overdue created' %
(application_id, sub_bill_id))
return
else:
id = idg()
type = ApplicationType.CASH_LOAN_STAGING.value
sub_overdue_bill['collection_id'] = id
ptp_info = BombingHistory.filter(BombingHistory.application == id).first()
promised_amount = ptp_info and ptp_info.promised_amount
promised_date = ptp_info and ptp_info.promised_date
application = Application.create(id=id, user_id=gold_app['user_id'],
user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[
'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],
contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(
'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=
birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(
gold_app.get('id_ektp')), profile_province=(gold_app.get(
'profile_province') or {}).get('name'), profile_city=(gold_app.get(
'profile_city') or {}).get('name'), profile_district=(gold_app.get(
'profile_district') or {}).get('name'), profile_residence_time=
gold_app.get('profile_residence_time'), profile_residence_type=
gold_app.get('profile_residence_type'), profile_address=gold_app.
get('profile_address'), profile_education=gold_app.get(
'profile_education'), profile_college=(gold_app.get(
'profile_college') or {}).get('name'), job_name=gold_app.get(
'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get
('job_bpjs'), job_user_email=gold_app.get('job_user_email'),
job_type=gold_app.get('job_type'), job_industry=gold_app.get(
'job_industry'), job_department=gold_app.get('job_department'),
job_province=(gold_app.get('job_province') or {}).get('name'),
job_city=(gold_app.get('job_city') or {}).get('name'), job_district
=(gold_app.get('job_district') or {}).get('name'), job_address=
gold_app.get('job_address'), amount=amount, amount_net=amount_net,
interest_rate=interest_rate, term=gold_app.get('term'),
origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=
sub_bill.get('repay_at'), loan_success_times=loan_success_times,
arrived_at=datetime.now(), follow_up_date=datetime.now(),
promised_amount=promised_amount, promised_date=promised_date,
external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=
datetime.now())
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('overdue application %s created', application_id)
Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)
add_contact(application)
def add_contact(application):
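    """Import every phone number known for the applicant (apply info,
    extra phones, EC contacts, job phone, SMS contacts, call-frequency
    top 5, KTP/dual/other-login lookups, company numbers) into Contact,
    deduplicating via existing_numbers, then ship the collected
    inserts/updates to the mongo importer queue.
    """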
logging.info('start add contact for application: %s', application.id)
contacts = Contact.filter(Contact.user_id == application.user_id)
existing_numbers = {contact.number for contact in contacts}
insert_contacts = list()
mon_insert_contact = {}
user_mobile_no = number_strip(application.user_mobile_no)
if user_mobile_no and user_mobile_no not in existing_numbers:
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': user_mobile_no, 'relationship':
Relationship.APPLICANT.value, 'source': 'apply info',
'real_relationship': Relationship.APPLICANT.value})
existing_numbers.add(number_strip(application.user_mobile_no))
extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id
)
if not extra_phone.ok:
extra_phone = []
logging.error('get user %s extra contacts failed', application.user_id)
else:
extra_phone = extra_phone.json()['data']
if extra_phone:
for i in extra_phone:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': number, 'relationship':
Relationship.APPLICANT.value, 'source': 'extra phone',
'real_relationship': Relationship.APPLICANT.value})
key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
ec_contact = []
contact = json.loads(application.contact or '[]')
for i in contact:
if number_strip(i['mobile_no']
) not in existing_numbers and number_strip(i['mobile_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['mobile_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['mobile_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['mobile_no']))
if i['type'] != 1:
continue
if number_strip(i['tel_no']) not in existing_numbers and number_strip(i
['tel_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['tel_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['tel_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['tel_no']))
if ec_contact:
Contact.insert_many(ec_contact).execute()
if all((application.job_tel, number_strip(application.job_tel),
number_strip(application.job_tel) not in existing_numbers)):
insert_contacts.append({'user_id': application.user_id, 'name':
None, 'number': number_strip(application.job_tel),
'relationship': Relationship.COMPANY.value, 'source':
'basic info job_tel', 'real_relationship': Relationship.COMPANY
.value})
key = user_mobile_no, number_strip(application.job_tel
), ContactType.C_BASIC_INFO_JOB_TEL.value
mon_insert_contact[key] = 1, 0, None
existing_numbers.add(number_strip(application.job_tel))
sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %
application.external_id)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id
)
else:
sms_contacts = sms_contacts.json()['data']
if sms_contacts:
for i in sms_contacts:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.SUGGESTED.value, 'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value})
key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value
mon_insert_contact[key] = 1, 0, i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
cf = GoldenEye().get('/applications/%s/call/frequency' % application.
external_id)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
insert_contacts = []
fm = GoldenEye().get('/applications/%s/contact/family-member' %
application.external_id)
if not fm.ok:
family = []
logging.error('get application %s family-member info error',
application.external_id)
else:
family = fm.json()['data']
if family:
for i in family:
if not i.get('number'):
logging.info('family member %s' % str(i))
continue
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
logging.info('family members: %s' % str(i))
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.FAMILY.value, 'source': FamilyContactType.
CALLEC.value, 'total_count': i.get('total_count', 1),
'total_duration': i.get('total_duration', 0),
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number, ContactType.F_CALL_EC.value
mon_insert_contact[key] = i.get('total_count', 1), i.get(
'total_duration', 0), i['name'][:128]
existing_numbers.add(number)
mon_update_contact = {}
if call_frequency:
with db.atomic():
count = 1
for i in call_frequency:
number = number_strip(i['number'])[:64]
if not number:
continue
                if number in existing_numbers:
                    Contact.update(total_count=i['total_count'],
                        total_duration=i['total_duration']).where(Contact.
                        number == number, Contact.user_id == application.
                        user_id).execute()
key = user_mobile_no, number
mon_update_contact[key] = i['total_count'], i[
'total_duration']
continue
if count < 6:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.FAMILY.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': FamilyContactType.
CALLTOP5.value, 'real_relationship': Relationship.
FAMILY.value})
count += 1
key = user_mobile_no, number, ContactType.F_CALL_TOP5.value
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
else:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value})
key = (user_mobile_no, number, ContactType.
S_CALL_FREQUENCY.value)
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
next_apply_list = AccountService().add_contact(application.user_id)
for next_apply in next_apply_list:
number = number_strip(str(next_apply))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
SUGGESTED.value, source='online profile phone',
real_relationship=Relationship.SUGGESTED.value)
key = (user_mobile_no, number, ContactType.
S_ONLINE_PROFILE_PHONE.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %
application.user_id)
if not next_applicant.ok:
next_applicant = []
logging.error('get user %s dual_contact contacts failed' %
application.user_id)
else:
next_applicant = next_applicant.json()['data']
if next_applicant:
for i in next_applicant:
number = number_strip(str(i))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=
application.user_name, number=number, relationship=
Relationship.APPLICANT.value, source='apply info',
real_relationship=Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_APPLY_INFO.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
numbers = []
try:
numbers = AccountService().ktp_number(path_params={'user_id':
application.user_id})
except Exception as e:
logging.info('request ktp numbers failed %s' % str(e))
for n in numbers:
number = number_strip(str(n))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
APPLICANT.value, source='ktp number', real_relationship=
Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
            logging.info('get user %s ktp number contacts success' %
                application.user_id)
    ecs = None
    try:
        ecs = GoldenEye().get('/applications/%s/contact/ec' % application.
            external_id)
    except Exception as e:
        logging.info('request ec-member error: %s' % str(e))
try:
        if not ecs or not ecs.ok:
ec = []
logging.info('get application %s ec-member info error',
application.external_id)
else:
ec = ecs.json()['data']
if ec:
for e in ec:
number = number_strip(e['numbers'])[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=e[
'name'][:128], number=number, relationship=
Relationship.FAMILY.value, source=FamilyContactType
.CONTACTEC.value, real_relationship=Relationship.
FAMILY.value)
key = (user_mobile_no, number, ContactType.F_CONTACT_EC
.value)
mon_insert_contact[key] = 1, 0, e['name'][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add ec_member error:%s' % str(e))
    mn = None
    try:
        mn = GoldenEye().get('/applications/%s/contact/my_number' %
            application.external_id)
    except Exception as e:
        logging.info('request my_number error: %s' % str(e))
try:
        if not mn or not mn.ok:
my = []
logging.info('get application %s my_number info error',
application.external_id)
else:
my = mn.json()['data']
if my:
for m in my:
number = number_strip(m)[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=my[m][
:128], number=number, relationship=Relationship.
SUGGESTED.value, source='my number',
real_relationship=Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_MY_NUMBER.value
mon_insert_contact[key] = 1, 0, my[m][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add my_member error:%s' % str(e))
    cn = None
    try:
        cn = GoldenEye().get('/applications/%s/contact/company-number' %
            application.external_id)
    except Exception as e:
        logging.info('request company-number error: %s' % str(e))
try:
        if not cn or not cn.ok:
cn = []
logging.info('get application %s company_number info error',
application.external_id)
else:
cn = cn.json()['data']
if cn:
for c in cn:
number = c
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=cn[c][
:128], number=number, relationship=Relationship.
COMPANY.value, source='company', real_relationship=
Relationship.COMPANY.value)
key = user_mobile_no, number, ContactType.C_COMPANY.value
mon_insert_contact[key] = 1, 0, cn[c][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add company_member error:%s' % str(e))
try:
ol = AccountService().other_login_contact(userId=application.user_id)
except Exception as e:
logging.error('request other_login error: %s' % e)
ol = {}
try:
for o in ol:
number = number_strip(o)
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=ol[o][:128
], number=number, relationship=Relationship.SUGGESTED.
value, source='other_login', real_relationship=
Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value
mon_insert_contact[key] = 1, 0, ol[o][:128]
except Exception as e:
logging.error('add other_login number error:%s' % e)
logging.info('add contact for application %s finished', application.id)
if mon_insert_contact or mon_update_contact:
send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {
'user_mobile_no': user_mobile_no, 'insert_contact': str(
mon_insert_contact), 'update_contact': str(mon_update_contact),
'user_id': application.user_id, 'name': application.user_name})
def get_contact_from_mongo(number):
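    """Fetch the related numbers stored in mongo for a source number,
    dropping entries whose source or relationship cannot be mapped,
    and return plain dicts for the caller.
    """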
if not number:
return []
query = TotalContact.objects(src_number=number, source__in=TotalContact
.available()).order_by('source')
lst = []
for c in query:
relation = TotalContact.relationship(c.source)
if relation == -1:
continue
source = TotalContact.str_source(c.source)
if not source:
continue
lst.append({'related_number': c.dest_number, 'source': source,
'is_calc': c.is_calc, 'total_count': c.total_count,
'total_duration': c.total_duration, 'relation': relation,
'name': c.dest_name})
return lst
def check_key_not_none(payload, keys):
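    """Return False (and log) when any key is missing or None, e.g.
    check_key_not_none({'a': 1}, ['a', 'b']) is False.
    """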
for key in keys:
if payload.get(key) is None:
logging.error('Missing args {}'.format(str(key)))
return False
return True
@action(MessageAction.BILL_PAID)
def bill_paid(payload, msg_id):
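    """Handle a repayment event: validate the payload, locate the
    application (and its OverdueBill for instalment loans), write a
    RepaymentLog, promote the last successfully connected number's call
    priority, and drop a repaid notice into the bomber's Inbox.
    """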
validate = check_key_not_none(payload, ['external_id', 'late_fee_part',
'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])
if not validate:
logging.error('payload key not fully pass in.')
return
external_id = payload['external_id']
late_fee_part = Decimal(payload['late_fee_part'])
principal_part = Decimal(payload['principal_part'])
paid_at = payload['paid_at']
partner_bill_id = payload['partner_bill_id']
logging.debug(
'application %s paid principal part %s, paid late fee part %s',
external_id, principal_part, late_fee_part)
application = Application.filter(Application.external_id == external_id
).order_by(-Application.created_at).first()
if not application:
logging.info('application %s paid, not found application', external_id)
return
sub_bill_id = payload['bill_sub_id']
overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==
application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()
if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not
overdue_bill):
logging.info('bill sub not in bomber %s', sub_bill_id)
return
with db.atomic():
repay_at = str_no_utc_datetime(payload['latest_repay_at'])
Application.update(repay_at=repay_at).where(Application.id ==
application.id).execute()
RepaymentLog.create(application=application.id, is_bombed=True,
current_bomber=application.latest_bomber_id, cycle=application.
cycle, principal_part=principal_part, late_fee_part=
late_fee_part, repay_at=paid_at, ptp_bomber=application.
ptp_bomber, latest_call=application.latest_call, periods=
overdue_bill.periods if overdue_bill else None, overdue_bill_id
=overdue_bill.id if overdue_bill else None, partner_bill_id=
partner_bill_id)
phone_status = PhoneStatus.CONNECTED.value
real_relationship = RealRelationship.user_values()
commit = CallActionCommit.NO.value
number = CallActions.select(CallActions.number).where(CallActions.
phone_status == phone_status, CallActions.real_relationship <<
real_relationship, CallActions.commit == commit, CallActions.
application == application.id).order_by(-CallActions.created_at
).first()
if number:
Contact.update(call_priority=PriorityStatus.REPAY.value).where(
Contact.user_id == application.user_id, Contact.
call_priority == PriorityStatus.LAST.value).execute()
Contact.update(call_priority=PriorityStatus.LAST.value).where(
Contact.user_id == application.user_id, Contact.number ==
number.number).execute()
if not application.latest_bomber_id:
return
Inbox.create(title='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id), content=
'application %s,sub_bill_id %s repaid' % (application.
external_id, sub_bill_id), receiver=application.
latest_bomber_id or application.last_bomber_id, category=
InboxCategory.REPAID.value)
@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
"""已废弃"""
bill = payload['head_bill']
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_row = Application.update(repay_at=repay_at).where(Application.
id == bill['external_id']).execute()
logging.info('application %s bill relief done', bill['external_id'])
return updated_row
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query = Application.update(overdue_days=overdue_days).where(Application
.status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.
overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)
updated_rows_count = query.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count)
try:
calc_overdue_days_over_instalment()
except Exception as e:
logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))
apps = Application.filter(Application.status << [ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value], Application.overdue_days > 95,
Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
def calc_overdue_days_over_instalment():
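    """Recompute overdue_days for instalment sub-bills already past 95 DPD,
    then roll the largest sub-bill value up to each parent application."""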
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days > 95)
updated_rows_count = query.execute()
logging.info(
'calc_overdue_days_over_instalment done,count:%s,status:%s' % (
updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.collection_id,
OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on
=OverdueBill.collection_id == Application.id).where(Application
.status == status, Application.type == ApplicationType.
CASH_LOAN_STAGING.value)
app_update = {}
for ob in overdue_bills:
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)
def calc_overdue_days(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query_unclaimed = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.UNCLAIMED.value,
Application.overdue_days <= 95, Application.type == ApplicationType
.CASH_LOAN.value)
updated_rows_count_unclaimed = query_unclaimed.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_unclaimed)
query_processing = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.PROCESSING.value,
Application.overdue_days <= 95, Application.type == ApplicationType
.CASH_LOAN.value)
updated_rows_count_processing = query_processing.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_processing)
query_test = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.AB_TEST.value, Application.
overdue_days <= 95, Application.type == ApplicationType.CASH_LOAN.value
)
updated_rows_count_test = query_test.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_test)
calc_overdue_days_instalment()
apps = Application.select(Application.id).where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value, ApplicationStatus.AB_TEST.value], Application.overdue_days <=
95, Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
Application.update(C1A_entry=datetime.now()).where(Application.status <<
[ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value, ApplicationStatus.AB_TEST.value], Application.overdue_days == 4
).execute()
def calc_overdue_days_instalment():
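    """Recompute overdue_days for instalment sub-bills within 95 DPD,
    skipping sub-bills repaid before the start of the current month, and
    roll the largest value up to each parent application."""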
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
today_now_time = datetime.now()
month_first_day = today_now_time.replace(day=1, hour=1, minute=30,
second=0, microsecond=0)
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days <= 95)
updated_rows_count = query.execute()
logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %
(updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.
created_at, OverdueBill.collection_id, OverdueBill.overdue_days
).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.
collection_id == Application.id).where(Application.status ==
status, Application.type == ApplicationType.CASH_LOAN_STAGING.value
)
app_update = {}
for ob in overdue_bills:
if (ob.status == ApplicationStatus.REPAID.value and ob.
created_at < month_first_day):
continue
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)
def automatic_escalation(payload, msg_id):
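    """Escalate applications whose overdue_days have crossed into a higher
    cycle: close the old dispatch-history row, record the escalation, and
    reset the application's bomber/call state for the new cycle."""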
app_ids = payload.get('application_list', [])
if not app_ids:
return
apps = Application.select().where(Application.id.in_(app_ids),
Application.status != ApplicationStatus.REPAID.value)
for a in apps:
new_cycle = application_entry_different_calculations(a)
if a.overdue_days < 90:
logging.info(
'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'
.format(a.id, new_cycle, a.cycle, a.overdue_days))
if new_cycle > a.cycle:
with db.atomic():
if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle
.C1B.value):
bomber_id = (a.latest_bomber_id if a.latest_bomber_id else
a.cycle)
DispatchAppHistory.update(out_at=datetime.now(),
out_overdue_days=a.overdue_days).where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == bomber_id).execute()
Escalation.create(application=a.id, type=EscalationType.
AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,
current_cycle=a.cycle, escalate_to=new_cycle,
current_bomber_id=a.latest_bomber)
dis_app_update = DispatchApp.update(status=DisAppStatus.
ABNORMAL.value).where(DispatchApp.application == a.id)
dis_app_update.execute()
a.cycle = new_cycle
a.last_bomber = a.latest_bomber
a.status = ApplicationStatus.UNCLAIMED.value
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
if new_cycle == Cycle.C1B.value:
a.C1B_entry = datetime.now()
elif new_cycle == Cycle.C2.value:
a.C2_entry = datetime.now()
elif new_cycle == Cycle.C3.value:
a.C3_entry = datetime.now()
a.save()
logging.info('automatic escalation done')
def application_entry_different_calculations(app):
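    """Map an application's overdue_days onto a collection cycle; fall back
    to the current cycle when no range matches."""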
    conf = {1: [1, 10], 2: [11, 30], 3: [31, 60], 4: [61, 90],
        5: [91, 999999]}
for new_cycle, scopes in conf.items():
if scopes[0] <= app.overdue_days <= scopes[1]:
return new_cycle
return app.cycle
@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
"""已废弃"""
employees = Bomber.select(Bomber, Role).join(Role)
summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,
'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,
'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':
0, 'sms_sent': 0} for i in employees}
now_date = date.today()
cal_date = now_date - timedelta(days=1)
claimed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('claimed')).where(fn.DATE(Application.
claimed_at) == cal_date, Application.status << [ApplicationStatus.
PROCESSING.value, ApplicationStatus.REPAID.value], Application.
latest_bomber.is_null(False)).group_by(Application.latest_bomber)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False)).group_by(
Application.latest_bomber)
completed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('completed')).where(Application.
latest_bombing_time.is_null(False), fn.DATE(Application.
latest_bombing_time) == cal_date, Application.latest_bomber.is_null
(False)).group_by(Application.latest_bomber)
escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(
Escalation.id).alias('escalated')).where(fn.DATE(Escalation.
created_at) == cal_date, Escalation.type == EscalationType.
AUTOMATIC.value, Escalation.current_bomber.is_null(False),
Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation
.current_bomber)
transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).
alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==
cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(
Transfer.operator)
promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(
BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.
created_at) == cal_date, BombingHistory.result == BombingResult.
HAS_PROGRESS.value).group_by(BombingHistory.bomber)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,
RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.
current_bomber)
calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id
).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==
cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)
calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.
record_id).alias('calls_connected')).where(fn.DATE(CallLog.
time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==
'1').group_by(CallLog.user_id)
sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(
ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_
(ConnectType.sms()), ConnectHistory.created_at >= cal_date,
ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)
for i in claimed:
summary[i.latest_bomber_id]['claimed'] += i.claimed
for i in completed:
summary[i.latest_bomber_id]['completed'] += i.completed
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
for i in escalated:
summary[i.current_bomber_id]['escalated'] += i.escalated
for i in transferred:
summary[i.operator_id]['transferred'] += i.transferred
for i in promised:
summary[i.bomber_id]['promised'] += i.promised
for i in amount_recovered:
        recovered = i.principal_part + i.late_fee_part
        summary[i.current_bomber_id]['amount_recovered'] += recovered
for i in calls_made:
summary[int(i.user_id)]['calls_made'] += i.calls_made
for i in calls_connected:
summary[int(i.user_id)]['calls_connected'] += i.calls_connected
for i in sms_sent:
summary[i.operator_id]['sms_sent'] += i.sms_sent
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'claimed': data['claimed'], 'completed': data['completed'],
'cleared': data['cleared'], 'escalated': data['escalated'],
'transferred': data['transferred'], 'promised': data['promised'
], 'amount_recovered': data['amount_recovered'], 'calls_made':
data['calls_made'], 'calls_connected': data['calls_connected'],
'sms_sent': data['sms_sent'], 'date': cal_date})
if insert_args:
Summary.insert_many(insert_args).execute()
cycle_args = []
escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(
Escalation.id).alias('escalated_in')).where(Escalation.status ==
ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==
cal_date).group_by(Escalation.escalate_to)
for i in escalated_in:
cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.
escalated_in, 'date': cal_date})
amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM
(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)
for i in amount_recovered_total:
        total_recovered = i.principal_part + i.late_fee_part
        cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':
            total_recovered, 'date': cal_date})
if cycle_args:
Summary.insert_many(cycle_args).execute()
logging.info('cal summary done')
send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})
@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
"""已废弃,定时任务还在执行,具体情况待确定"""
cal_date = date.today() - timedelta(days=1)
employees = Bomber.select(Bomber, Role).join(Role)
auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,
AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')
).where(fn.DATE(AutoCallActions.created_at) == cal_date)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.
is_null(False), RepaymentLog.is_bombed == True)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False))
auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,
AutoCallActions.result)
amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)
cleared = cleared.group_by(Application.latest_bomber)
summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,
'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered':
0} for e in employees}
for a in auto_call_actions:
summary[a.bomber_id]['answered_calls'] += a.count
if a.result == AutoCallResult.PTP.value:
summary[a.bomber_id]['ptp'] += a.count
if a.result == AutoCallResult.FOLLOW_UP.value:
summary[a.bomber_id]['follow_up'] += a.count
if a.result == AutoCallResult.NOT_USEFUL.value:
summary[a.bomber_id]['not_useful'] += a.count
for i in amount_recovered:
        recovered = i.principal_part + i.late_fee_part
        summary[i.current_bomber_id]['amount_recovered'] += recovered
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'answered_calls': data['answered_calls'], 'ptp': data['ptp'],
'follow_up': data['follow_up'], 'not_useful': data['not_useful'
], 'cleared': data['cleared'], 'amount_recovered': str(data[
'amount_recovered']), 'date': cal_date})
if insert_args:
Summary2.insert_many(insert_args).execute()
@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
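    """Send the discount-approved auto-SMS to the borrower, using the
    template matched by msg_type and the application's app."""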
app_id = payload['id']
msg_type = payload['msg_type']
discount_to = payload['discount_to']
effective_to = payload['effective_to']
application = Application.filter(Application.id == app_id).first()
if not application:
logging.error(
'discount approved msg send failed application %s not found',
app_id)
return
template = Template.select(Template.text, Template.app).where(Template.
type == ConnectType.AUTO_SMS.value, Template.id << Template.
get_auto_sms_tpl(msg_type), Template.app == application.app).first()
if not template:
        logging.error('discount approved msg send failed template %s not found',
            msg_type)
return
promised_date = None
if application.promised_date:
promised_date = application.promised_date.strftime('%d-%m-%Y')
tpl_data = {'user_name': application.user_name, 'due_days': application
.overdue_days, 'app_name': application.app, 'phone': application.
user_mobile_no, 'cs_number': cs_number_conf.get(application.app,
'02150202889'), 'promised_date': promised_date, 'discount_to':
discount_to, 'effective_to': effective_to}
content = template.text.format(**tpl_data)
data_list = [{'receiver': '62' + application.user_mobile_no, 'content':
content, 'title': ''}]
send_sms(data_list, msg_type, application.app)
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):
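    """Rebuild the auto-call list: run the dispatches, then queue every
    eligible C1A application that is not already dispatched to a partner."""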
with db.atomic():
bomber_dispatch_app()
dispatch_instalment_app()
dis_apps = DispatchApp.select(DispatchApp.application).where(
DispatchApp.status == DisAppStatus.NORMAL.value)
c1_apps = Application.select(Application.id, Application.cycle,
Application.follow_up_date, Application.called_times).where(Application
.status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.
AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.
is_rejected == False, Application.promised_date.is_null(True) | (fn
.DATE(Application.promised_date) < datetime.today().date())).order_by(
Application.overdue_days, Application.apply_at)
dis_apps_ids = [da.application_id for da in dis_apps]
insert_args = []
for a in c1_apps:
if a.id in dis_apps_ids:
continue
insert_args.append({'application': a.id, 'cycle': a.cycle,
'follow_up_date': a.follow_up_date, 'called_times': 1 if a.
called_times else 0, 'description': 'init'})
    if not insert_args:
        logging.error('no application needs auto call')
with db.atomic():
AutoCallList.delete().execute()
for idx in range(0, len(insert_args), 100):
AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()
for idx in range(0, len(insert_args), 100):
application_list = [i['application'] for i in insert_args[idx:idx +
100]]
send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {
'application_list': application_list})
logging.info('bomber generate auto call list finished')
send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})
class ChangeBomberTool(object):
@staticmethod
def in_record(bomber_id, ids, bd):
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.
alias('application_id'), R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(
'expected_out_time'), Application.overdue_days.alias(
'entry_overdue_days')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << ids)
Application.update(latest_bomber=bomber_id).where(Application.id.
in_(ids)).execute()
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bd),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@staticmethod
def out_record(a, bd):
_id = str(a.id)
DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a
.overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]
.get('principal_paid')), out_late_fee_pending=bd[_id].get(
'late_fee') - bd[_id].get('late_fee_paid')).where(
DispatchAppHistory.application == a.id, DispatchAppHistory.
bomber_id == a.latest_bomber_id).execute()
a.last_bomber = a.latest_bomber
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
a.save()
@staticmethod
def classify(l, b):
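        """Pick an entry from l, preferring one whose 'bomber' differs
        from b and whose 'ids' list is shortest."""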
if len(l) == 1:
return l[0]
_l = filter(lambda x: x['bomber'] != b, l)
return min(_l, key=lambda x: len(x['ids']))
def bomber_dispatch_app():
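    """Run the C1A dispatch, then split today's C2-entry cash-loan apps
    across partners by app_percentage; the remainder goes to the C2
    AB-test bombers."""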
try:
c1a_dispatch_app()
except Exception as e:
logging.error('c1a_dispatch_app error:%s' % str(e))
    cycle = {1: 10, 2: 30, 3: 60, 4: 90}
apps = Application.select().where(fn.DATE(Application.C2_entry) == date
.today(), Application.type == ApplicationType.CASH_LOAN.value)
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C2.value)
apps_ids = [a.id for a in apps]
dispatch_inserts = []
start_index = 0
apps_length = len(apps_ids)
logging.warning('apps length %s' % str(apps_length))
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)
gen = CycleIter([b.id for b in bombers])
existing_list = []
end_index = start_index + int(apps_length * p.app_percentage)
        logging.info('partner slice end index %s' % str(end_index))
if not apps_ids[start_index:end_index]:
continue
bills = BillService().bill_list(application_ids=apps_ids[
start_index:end_index])
bill_dict = {bill['application_id']: bill for bill in bills}
for a_id in apps_ids[start_index:end_index]:
bomber = average_gen(gen, existing_list)
q = DispatchApp.delete().where(DispatchApp.application == a_id
).execute()
dispatch_inserts.append({'application': a_id, 'bomber': bomber,
'partner': p.id})
application = Application.select().where(Application.id == a_id
).first()
application.latest_bomber = bomber
application.status = ApplicationStatus.AB_TEST.value
application.ptp_bomber = None
application.save()
day_next_cycle = cycle.get(application.cycle
) - application.overdue_days
DispatchAppHistory.create(application=a_id, partner_id=p.id,
bomber_id=bomber, entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - Decimal(
bill_dict[a_id].get('principal_paid')),
entry_late_fee_pending=Decimal(bill_dict[a_id].get(
'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')
), expected_out_time=date.today() + timedelta(days=
day_next_cycle))
start_index = end_index
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
config = SystemConfig.prefetch(SCI.AB_TEST_C2)
c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)
c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)
c2 = apps_ids[start_index:]
if c2:
bills = BillService().bill_list(application_ids=c2)
else:
bills = []
bill_dict = {bill['application_id']: bill for bill in bills}
    logging.info('c2 AB_test length: %s' % str(len(c2)))
gen = CycleIter(c2_bomber)
existing_list = []
for c in c2:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == c).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = 46 - application.overdue_days
DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at
=datetime.now(), entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - bill_dict[c].get(
'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(
'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),
expected_out_time=date.today() + timedelta(days=day_next_cycle))
ab_test_other()
def c1a_dispatch_app():
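    """Split today's dpd1-entry cash-loan apps across C1A partners by
    app_percentage, assigning them evenly among each partner's bombers."""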
today = datetime.today().date()
tomorrow = today + timedelta(days=1)
c1a_apps = Application.select().where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value], Application.dpd1_entry >= today, Application.dpd1_entry <
tomorrow, Application.type == ApplicationType.CASH_LOAN.value)
all_aids = [a.id for a in c1a_apps]
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C1A.value)
end = 0
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
is_del == 0)
start = end
end += int(len(all_aids) * p.app_percentage)
aids = all_aids[start:end]
bids = [b.id for b in bombers]
if not bids or not aids:
continue
average_number = get_average_number(len(aids), len(bids))
p_end = 0
for i, bid in enumerate(bids):
p_start = p_end
p_end += average_number[i]
b_aids = aids[p_start:p_end]
with db.atomic():
q = Application.update(latest_bomber=bid, status=
ApplicationStatus.AB_TEST.value).where(Application.id <<
b_aids).execute()
params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,
'application_ids': b_aids, 'dest_bomber_id': bid}
new_in_record(**params)
try:
dispatch_inserts = []
for aid in b_aids:
dispatch_inserts.append({'application': aid, 'bomber':
bid, 'partner': p.id, 'status': DisAppStatus.NORMAL
.value})
if dispatch_inserts:
q = DispatchApp.insert_many(dispatch_inserts).execute()
except Exception as e:
                logging.error('c1a dispatch write to dispatch_app error:%s' % str(e))
def get_cash_bomber(bids, cycle):
cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==
0, Bomber.instalment != cycle)
cash_bids = [b.id for b in cash_bombers]
return cash_bids
def out_and_in_record_instalment(**kwargs):
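    """Close any open dispatch-history rows for the given instalment apps,
    then insert fresh entry rows for the destination bomber/partner."""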
if not kwargs.get('application_ids'):
return
out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(
DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True)).execute()
    cycle_period = {1: '10', 2: '30', 3: '60', 4: '90'}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(kwargs['dest_bomber_id'])).alias(
'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.
alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).
alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << kwargs[
'application_ids'])
application_list = list(subquery)
for idx in range(0, len(application_list), 50):
applications = application_list[idx:idx + 50]
app_ids = [i.application_id for i in applications]
overdue_bills = OverdueBill.select().where(OverdueBill.
collection_id << app_ids)
sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
insert_args = lambad_instalment_result(bill_list, applications)
if not insert_args:
continue
DispatchAppHistory.insert_many(insert_args).execute()
def c1b_dispatch_in_record(**kwargs):
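    """Insert dispatch-history entry rows for applications moving into
    C1B; returns False if any required argument is missing."""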
app_ids = kwargs.get('apps')
partner_id = kwargs.get('partner_id', 'null')
bill_dict = kwargs.get('bill_dict')
period = kwargs.get('period')
bomber_id = kwargs.get('bomber_id')
if not all([app_ids, partner_id, bill_dict, period]):
return False
bill_dict = {str(k): v for k, v in bill_dict.items()}
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().
alias('entry_at'), Application.overdue_days.alias(
'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.id << app_ids)
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
def check_call_history(application):
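    """Return True for applications created within the last 4 days;
    otherwise require a connected call on the application within the
    last 5 days."""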
app_create_at = application.created_at + timedelta(days=4)
if datetime.today().date() > app_create_at.date():
call_actions = CallActions.select().where(CallActions.type == 0,
CallActions.application == application.id, CallActions.
created_at > datetime.now() - timedelta(days=5))
for call in call_actions:
if call.phone_status == PhoneStatus.CONNECTED.value:
return True
return False
return True
@action(MessageAction.BOMBER_SCAVENGER)
def scavenger(payload, msg_id):
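    """Reset auto-call list entries stuck in PROCESSING or MAILBOX, and IVR
    jobs stuck in PROCESSING, back to a dispatchable state after their
    configured timeouts."""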
scavenger_time = -60
scavenger = SystemConfig.select().where(SystemConfig.key ==
'SCAVENGER_TIME').first()
if scavenger and scavenger.value.isdigit():
scavenger_time = -int(scavenger.value)
update_auto_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value, description='scavenger').where(AutoCallList.status ==
AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime
.now() + timedelta(minutes=scavenger_time))
count = update_auto_call_list.execute()
logging.info('scavenger processed %s application', count)
mail_box_scavenger_time = -30
mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==
'MAIL_BOX_SCAVENGER_TIME').first()
if mail_box_scavenger and mail_box_scavenger.value.isdigit():
mail_box_scavenger_time = -int(mail_box_scavenger.value)
update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.
value, AutoCallList.updated_at < datetime.now() + timedelta(minutes
=mail_box_scavenger_time))
mail_box_count = update_mail_box_call_list.execute()
logging.info('scavenger update mail box %s', mail_box_count)
update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value
).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.
updated_at < datetime.now() + timedelta(minutes=-30))
ivr_result = update_auto_ivr.execute()
logging.info('scavenger update %s ivr' % ivr_result)
@action(MessageAction.REPORT_BOMBER_COLLECTION)
def report_bomber_collection(payload, msg_id):
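    """Build the daily per-cycle collection funnel report from the auto-call
    list, call actions, and manual bombing history; skips if today's report
    already exists."""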
start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)
).scalar()
now = datetime.now()
if start_date and str(start_date) == str(now)[:10]:
return
end_date = str(now + timedelta(days=1))[:10]
start_date = str(now)[:10]
dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))
all_overdue_loan_sql1 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
GROUP BY 1;
"""
s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()
d1 = OperatedDict(s_data1)
all_overdue_loan_sql2 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE DATE(ba.follow_up_date) > CURDATE()
AND ba.called_counts = 0
GROUP BY 1;
"""
s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()
d2 = OperatedDict(s_data2)
overdue_loans_entered_into_predict_call_system_sql = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE ba.called_counts >= 1
GROUP BY 1;
"""
s_data3 = readonly_db.execute_sql(
overdue_loans_entered_into_predict_call_system_sql).fetchall()
d3 = OperatedDict(s_data3)
loans_completed_sql = """
SELECT ba.cycle, COUNT(DISTINCT ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()
d4 = OperatedDict(s_data4)
connected_calls_automatic_sql = """
SELECT ba.cycle, COUNT(ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()
d5 = OperatedDict(s_data5)
connected_calls_manual_sql = """
SELECT bb.cycle, COUNT(bb.id)
FROM bomber.bombing_history bb
WHERE DATE(bb.created_at) = curdate()
AND (bb.bomber_id < 150 OR bb.bomber_id > 200)
GROUP BY bb.cycle;
"""
s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()
d6 = OperatedDict(s_data6)
logging.info('Directly get data from database successfully.')
c1 = d1 - d2
c2 = d3
c3 = c2 / c1
c4 = d4
c5 = c4 / c2
c6 = d5
c7 = c6 / c4
c8 = d6
c9 = OperatedDict(get_agent())
c10 = (c6 + c8) / c9
try:
c11 = average_call_duration_team(start_date, end_date)
except AttributeError:
c11 = {}
lst = []
for i in range(1, 5):
lst.append({'apply_date': start_date, 'cycle': dct[i],
'all_overdue_loan': c1.get(i, 0),
'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),
'of_overdue_loans_entered_into_predict_call_system': round(c3.
get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),
'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *
100, 1), 'connected_calls_automatic': c6.get(i, 0),
'connected_calls_automatic_completed_loans': round(c7.get(i, 0),
1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i,
0), 'average_calls_agent': round(c10.get(i, 0), 1),
'average_call_duration_team': round(c11.get(i, 0), 1)})
ReportCollection.insert_many(lst).execute()
logging.info('report_bomber_collection:Done!')
@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)
def bomber_auto_call_list_record(payload, msg_id):
"""记录一年的auto_call_list,删除前一天的数据,增加今天的数据"""
now = datetime.now()
if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):
date_sql = """
SELECT DATE(created_at) FROM auto_call_list_record
GROUP BY DATE(created_at) limit 1
"""
del_date = db.execute_sql(date_sql).fetchone()[0]
del_sql = """
DELETE FROM auto_call_list_record WHERE date(created_at) = %s
"""
db.execute_sql(del_sql, [del_date])
sql = """
INSERT INTO auto_call_list_record
SELECT * FROM auto_call_list
"""
db.execute_sql(sql)
logging.info('bomber_auto_call_list_record done')
def new_out_record(**kwargs):
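    """Close the source bomber's open dispatch-history rows for the given
    apps and, unless this is a monthly dispatch, clear their ptp_bomber."""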
if not kwargs['application_ids']:
return
DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.
bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.
application << kwargs['application_ids'], DispatchAppHistory.out_at
.is_null(True)).execute()
if kwargs.get('month_dispatch'):
return
try:
Application.update(ptp_bomber=None).where(Application.id << kwargs[
'application_ids']).execute()
except Exception as e:
logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[
'application_ids'], str(e)))
def end_old_application(old_app, paid=False):
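    """Mark an old-loan record PAID or FINISHED when due; return the
    application id when the caller should stop collection on it."""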
if paid:
if old_app.status == OldLoanStatus.WAITING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return old_app.application_id
end_date = old_app.end_date
now = datetime.now()
if now >= max(end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
old_app.save()
return old_app.application_id
def start_old_application(old_app, cancel=False):
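    """Move an old-loan record into collection under the dedicated old-app
    bomber; with cancel=True, roll a PAID record back to its prior state."""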
application_id = old_app.application_id
if cancel and old_app.status == OldLoanStatus.PAID.value:
now = datetime.now()
if old_app.start_date is None:
old_app.status = OldLoanStatus.WAITING.value
elif now >= max(old_app.end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.
promised_date or now)).where(DispatchAppHistory.bomber_id ==
old_app.bomber_id, DispatchAppHistory.application ==
application_id).execute()
else:
old_app.status = OldLoanStatus.PROCESSING.value
DispatchAppHistory.update(out_at=None).where(DispatchAppHistory
.bomber_id == old_app.bomber_id, DispatchAppHistory.
application == application_id).execute()
old_app.save()
return
application = Application.get_or_none(Application.id == application_id,
Application.status != ApplicationStatus.REPAID.value, Application.
overdue_days > 90, Application.promised_date.is_null(True) | (fn.
DATE(Application.promised_date) < datetime.today().date()))
if not application:
logging.error('Can not set old application %s to start collecting',
application_id)
return
if old_app.status in OldLoanStatus.no_available():
logging.info('%s has finished or paid', old_app.application_id)
return
config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)
sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)
old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value
old_app.status = OldLoanStatus.PROCESSING.value
old_app.bomber_id = old_app_bomber
old_app.start_date = datetime.now()
if not old_app.end_date:
old_app.end_date = datetime.now() + timedelta(days=sp)
old_app.save()
in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,
application_ids=[old_app.application_id], expected_out_time=str(
old_app.end_date))
def run_member_sql(sql):
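    """Run a read-only query expected to yield a single two-column row;
    return [0, 0] if it fails or returns nothing."""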
result = [0, 0]
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
sql_result = cursor.fetchone()
if sql_result:
result = sql_result
except Exception as e:
logging.info('run sql error: %s' % str(sql))
return result
def get_before_bomber(date_time):
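    """Estimate the DPD1-3 recovery rate for the week before date_time:
    amounts entering DPD1-3 minus what rolled to DPD4 or stayed at DPD2-3,
    stored as a cycle-0 RepaymentReport row."""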
begin_time = str(date_time - timedelta(days=7))
end_time = str(date_time)
old_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at<date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (begin_time, begin_time))
old_data = run_one_sql(old_sql)
new_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at> '%s'
and created_at<'%s'
and overdue_days=1;
"""
% (begin_time, end_time))
new_data = run_one_sql(new_sql)
dpd4_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>date_add('%s',interval 1 day)
and created_at< date_add('%s',interval 1 day)
and overdue_days=4;
"""
% (begin_time, end_time))
dpd4_data = run_one_sql(dpd4_sql)
dpd2_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at< date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (end_time, end_time))
dpd2_data = run_one_sql(dpd2_sql)
all_money = old_data + new_data
repayment = all_money - dpd4_data - dpd2_data
pro = 0
if all_money:
pro = repayment / all_money * 100
RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,
proportion=pro, repayment=repayment)
def get_c1a_into_rate(date_time):
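    """For each first-loan flag, record yesterday's C1A-entry pending amount
    and backfill repayments onto the matching entry-date rows of
    RepaymentReportInto."""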
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=19)
repayment_sql = (
"""
select
sum(b.principal_part+b.late_fee_part) as paid_amount,
cdt
from
(select
br.principal_part, br.late_fee_part,
date(cdt) as cdt, br.repay_at, br.application_id
from (
select ba.id, ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
if not repayment:
return
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
for d in repayment:
repay = d[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
d[1], RepaymentReportInto.cycle == Cycle.C1A.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = round(repay, 3)
pro = repay / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_c2_into_rate(date_time):
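    """Like the C1A entry report, but computed twice per first-loan flag:
    once over all C2 entries (CONTAIN) and once excluding partner-1
    dispatches (NOT_CONTAIN)."""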
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
not_contain_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=37)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
not_contain_repay = run_all_sql(not_contain_repay_sql)
if not not_contain_money and repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(
not_contain_money, 3), proportion='0', repayment=0,
is_first_loan=is_first_loan, contain_out=ContainOut.
NOT_CONTAIN.value)
for repay in not_contain_repay:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(all_money, 3),
proportion='0', repayment=0, is_first_loan=
is_first_loan, contain_out=ContainOut.CONTAIN.value)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_c3_into_rate(date_time):
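    """C3 counterpart of the entry-rate reports above."""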
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id, ba.c3_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=30)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c3_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c3_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = '%s'
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 4
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C3.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
if not repayment:
return
for repay in repayment:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
repay[1], RepaymentReportInto.cycle == Cycle.C3.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = repay_money
pro = 0
if report.all_money and int(report.all_money):
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_static_bomber(begin_date):
active_date = begin_date - timedelta(days=8)
bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),
BomberR.last_active_at.alias('active')).where(BomberR.
last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])
summary = []
for bomber in bombers:
summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':
bomber.role.cycle, 'work_ind': 0})
SummaryBomber.insert_many(summary).execute()
@time_logger
def get_call_and_made(end_date, begin_date, real_time_query=False):
call_sql = (
"""
select
bomber_id,
count(case when relationship is not null then application_id end)
as 'call_cnt',
count(distinct case when relationship is not null then
application_id end) as 'call_case',
count(case when phone_status=4 then application_id end) as 'connect',
count(distinct case when phone_status=4 then application_id end)
as 'connect_case'
from (
select bomber_id,application_id,phone_status, cycle, relationship
from bomber.call_actions ba
where created_at>'%s' and created_at<'%s'
and type in (0, 1)
) a
group by 1
"""
% (begin_date, end_date))
calls = run_all_sql(call_sql)
if real_time_query:
return calls
for call in calls:
bomber, call_cnt, case_made, connect_cnt, case_connect = call
SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,
call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(
SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date
).execute()
return calls
@time_logger
def get_claimed_cnt(end_date, begin_date, real_time_query=False):
table_date = begin_date - timedelta(days=30)
claimed_sql = (
"""
SELECT
COUNT( `t1`.`application_id` ) AS cnt,
`t1`.`bomber_id` AS bomber_id
FROM
`dispatch_app_history` AS t1
WHERE
( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null )
AND ( `t1`.`bomber_id` != 1000 )
AND ( `t1`.`partner_id` IS null )
AND ( `t1`.`entry_at` > '%s' )
AND ( `t1`.`entry_at` < '%s' )
GROUP BY
`t1`.`bomber_id`
"""
% (begin_date, table_date, end_date))
claimeds = run_all_sql(claimed_sql)
if real_time_query:
return claimeds
for claimed in claimeds:
cnt, bomber_id = claimed
SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == bomber_id).execute()
return claimeds
def get_sms_data(end_data, begin_data):
all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(
'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')
).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.
created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())
).group_by(ConnectHistoryR.operator)
for sms in all_sms:
SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==
begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()
return all_sms
@action(MessageAction.SUMMARY_NEW)
def summary_new(payload, msg_id):
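    """Daily rollup: populate yesterday's per-bomber SummaryBomber rows;
    becomes a no-op after five successful runs in a day."""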
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_active_bomber(begin_date)
get_call_and_made(end_date, begin_date)
get_claimed_cnt(end_date, begin_date)
get_sms_data(end_date, begin_date)
get_ptp_data(end_date, begin_date)
get_recover_amount(end_date, begin_date)
get_unfollowed(begin_date)
get_unfollowed_call(begin_date)
def get_new_case_amount(begin_date, end_date):
all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.
entry_late_fee_pending + DispatchAppHistoryR.
entry_principal_pending).alias('pending'), DispatchAppHistoryR.
bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')
).where(DispatchAppHistoryR.entry_at > begin_date,
DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.
partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)
for case in all_case:
SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt
=case.cnt).where(SummaryBomber.bomber_id == case.bomber_id,
SummaryBomber.time == begin_date).execute()
return all_case
def get_kp_today(begin_date, end_date):
sql = (
"""
select bomber_id, count(distinct application_id)
from(
SELECT bomber_id, application_id
FROM bomber.auto_call_actions a
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where a.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))
UNION
SELECT bomber_id, application_id
FROM bomber.bombing_history b
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where b.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))) result
GROUP BY 1
"""
% (begin_date, end_date, begin_date, begin_date, end_date, begin_date)
)
kp_today = run_all_sql(sql)
for kp in kp_today:
SummaryBomber.update(KP_today_cnt=kp[1]).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == kp[0]).execute()
def get_new_case_cleared(begin_date, end_date):
sql = (
"""
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c2_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c3_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
) a
GROUP BY 1
"""
% (begin_date, end_date, begin_date, end_date, begin_date, end_date))
case_cleared_sums = run_all_sql(sql)
for clear in case_cleared_sums:
SummaryBomber.update(new_case_cleared_sum=clear[1]).where(
SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==
clear[0]).execute()
@time_logger
def get_new_case_call(begin_date, end_date, real_query_time=False):
sql = (
"""
SELECT
bd.bomber_id,
count( DISTINCT bd.application_id )
FROM
bomber.dispatch_app_history bd
INNER JOIN bomber.call_actions bc
ON bd.application_id = bc.application_id
AND bd.bomber_id = bc.bomber_id
AND date( bd.entry_at ) = date( bc.created_at )
WHERE
entry_at > '%s'
AND entry_at < '%s'
AND partner_id IS NULL
GROUP BY 1
"""
% (begin_date, end_date))
new_case_calls = run_all_sql(sql)
if real_query_time:
return new_case_calls
for call in new_case_calls:
SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber
.bomber_id == call[0], SummaryBomber.time == begin_date).execute()
return new_case_calls
@time_logger
def get_calltime_avg(begin_date, end_date, real_query_time=False):
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction,
count( 1 ) AS auto_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
AND an.recording is not null
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]][0] += data[1]
result[data[0]][1] += data[2]
continue
result[data[0]] = [data[1], data[2]]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=
value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == key).execute()
return result
def get_no_calltime_avg(begin_date, end_date):
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND (an.status!='ANSWERED' or an.recording is null)
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
for data in manuals:
SummaryBomber.update(calltime_no_case_sum=data[1],
calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /
data[2] if data[2] else 0).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == data[0]).execute()
@time_logger
def get_calltime_sum(begin_date, end_date, real_query_time=False):
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == key).execute()
return result
@action(MessageAction.UPDATE_SUMMARY_NEW)
def update_summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'UPDATE_SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_new_case_amount(begin_date, end_date)
get_kp_cleared(begin_date, end_date)
get_kp_today(begin_date, end_date)
get_ptp_cnt(begin_date, end_date)
get_ptp_call_cnt(begin_date, end_date)
get_new_case_cleared(begin_date, end_date)
get_new_case_call(begin_date, end_date)
get_calltime_avg(begin_date, end_date)
get_no_calltime_avg(begin_date, end_date)
get_calltime_sum(begin_date, end_date)
def get_cycle_claimed(begin_date, end_date):
sql = (
"""
select cycle,count(1)
from bomber.application where cycle in (1,2,3,4)
and (finished_at is null or (finished_at>'%s'))
and created_at>'2018-09-01'
group by 1
"""
% begin_date)
result = run_all_sql(sql)
return result
@time_logger
def cycle_new_case(begin_date, end_date, real_time_query=False):
sql = (
"""
SELECT
1 AS cycle,
count( ba.id ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.created_at ) = bo.which_day_overdue
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c1b_entry ) = bo.which_day_overdue
WHERE
c1b_entry > '%s'
AND c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c2_entry ) = bo.which_day_overdue
WHERE
c2_entry > '%s'
AND c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c3_entry ) = bo.which_day_overdue
WHERE
c3_entry > '%s'
AND c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
all_datas = run_all_sql(sql)
if real_time_query:
return all_datas
for data in all_datas:
SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()
return all_datas
@time_logger
def get_cycle_new_case_call(begin_date, end_date, real_time_query=False):
sql = (
"""
SELECT
1 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.created_at ) = date( bc.created_at )
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c1b_entry ) = date( bc.created_at )
WHERE
ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c2_entry ) = date( bc.created_at )
WHERE
ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c3_entry ) = date( bc.created_at )
WHERE
ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
cycle_datas = run_all_sql(sql)
if real_time_query:
return cycle_datas
for data in cycle_datas:
SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber
.time == begin_date, SummaryBomber.cycle == data[0],
SummaryBomber.bomber_id == data[0]).execute()
return cycle_datas
def get_cycle_new_case_cleared(begin_date, end_date):
sql = (
"""
SELECT
'1' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.created_at ) = date( br.repay_at )
WHERE ba.created_at > '%s'
AND ba.created_at < '%s'
GROUP BY 1, 2 ) a
UNION
SELECT
'2' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
GROUP BY 1, 2) a
"""
% (begin_date, end_date, begin_date, end_date))
cycle_cleared = run_all_sql(sql)
for i in cycle_cleared:
SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber
.cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.
time == begin_date).execute()
@action(MessageAction.SUMMARY_NEW_CYCLE)
def summary_new_cycle(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW_CYCLE').first()
if worker_log and worker_log.logs >= 5:
return
cycle_datas = SummaryBomber.select(fn.SUM(SummaryBomber.
new_case_amount_sum).alias('new_case_amount_sum'), fn.SUM(
SummaryBomber.new_case_cleared_sum).alias('new_case_cleared_sum'),
fn.SUM(SummaryBomber.case_made_cnt).alias('case_made_cnt'), fn.SUM(
SummaryBomber.case_connect_cnt).alias('case_connect_cnt'), fn.SUM(
SummaryBomber.promised_cnt).alias('promised_cnt'), fn.SUM(
SummaryBomber.promised_amount).alias('promised_amount'), fn.SUM(
SummaryBomber.cleared_cnt).alias('cleared_cnt'), fn.SUM(
SummaryBomber.cleared_amount).alias('cleared_amount'), fn.SUM(
SummaryBomber.new_case_cnt).alias('new_case_cnt'), fn.SUM(
SummaryBomber.new_case_call_cnt).alias('new_case_call_cnt'), fn.SUM
(SummaryBomber.unfollowed_cnt).alias('unfollowed_cnt'), fn.SUM(
SummaryBomber.unfollowed_call_cnt).alias('unfollowed_call_cnt'), fn
.SUM(SummaryBomber.call_cnt).alias('call_cnt'), fn.SUM(
SummaryBomber.sms_cnt).alias('sms_cnt'), fn.SUM(SummaryBomber.
call_connect_cnt).alias('call_connect_cnt'), fn.SUM(SummaryBomber.
ptp_today_cnt).alias('ptp_today_cnt'), fn.SUM(SummaryBomber.
ptp_today_call_cnt).alias('ptp_today_call_cnt'), fn.SUM(
SummaryBomber.ptp_next_cnt).alias('ptp_next_cnt'), fn.SUM(
SummaryBomber.ptp_next_call_cnt).alias('ptp_next_call_cnt'), fn.SUM
(SummaryBomber.KP_cleared_cnt).alias('KP_cleared_cnt'), fn.SUM(
SummaryBomber.KP_today_cnt).alias('KP_today_cnt'), fn.SUM(
SummaryBomber.work_ind).alias('work_ind'), fn.SUM(SummaryBomber.
calltime_sum).alias('calltime_sum'), fn.SUM(SummaryBomber.
calltime_case_sum).alias('calltime_case_sum'), fn.SUM(SummaryBomber
.calltime_case_cnt).alias('calltime_case_cnt'), fn.SUM(
SummaryBomber.calltime_no_case_sum).alias('calltime_no_case_sum'),
        fn.SUM(SummaryBomber.calltime_no_case_cnt).alias(
        'calltime_no_case_cnt'), fn.SUM(SummaryBomber.work_time_sum).alias(
        'work_time_sum'), SummaryBomber.cycle.alias('cycle')).where(
SummaryBomber.time == begin_date, SummaryBomber.cycle << Cycle.values()
).group_by(SummaryBomber.cycle)
for cycle_data in cycle_datas:
SummaryBomber.create(bomber_id=cycle_data.cycle, time=begin_date,
cycle=cycle_data.cycle, new_case_amount_sum=cycle_data.
new_case_amount_sum, new_case_cleared_sum=cycle_data.
new_case_cleared_sum, new_case_cleard_rate=0, case_made_cnt=
cycle_data.case_made_cnt, case_made_rate=0, case_connect_cnt=
cycle_data.case_connect_cnt, case_connect_rate=0, promised_cnt=
cycle_data.promised_cnt, promised_amount=cycle_data.
promised_amount, cleared_cnt=cycle_data.cleared_cnt,
cleared_amount=cycle_data.cleared_amount, new_case_cnt=
cycle_data.new_case_cnt, new_case_call_cnt=cycle_data.
new_case_call_cnt, unfollowed_cnt=cycle_data.unfollowed_cnt,
unfollowed_call_cnt=cycle_data.unfollowed_call_cnt, call_cnt=
cycle_data.call_cnt, sms_cnt=cycle_data.sms_cnt,
call_connect_cnt=cycle_data.call_connect_cnt, calltime_case_avg
=0, ptp_today_cnt=cycle_data.ptp_today_cnt, ptp_today_call_cnt=
cycle_data.ptp_today_call_cnt, ptp_next_cnt=cycle_data.
ptp_next_cnt, ptp_next_call_cnt=cycle_data.ptp_next_call_cnt,
KP_cleared_cnt=cycle_data.KP_cleared_cnt, KP_today_cnt=
cycle_data.KP_today_cnt, KP_cleared_rate=0, work_ind=cycle_data
.work_ind, calltime_sum=cycle_data.calltime_sum,
calltime_case_sum=cycle_data.calltime_case_sum,
calltime_case_cnt=cycle_data.calltime_case_cnt,
calltime_no_case_sum=cycle_data.calltime_no_case_sum,
calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,
work_time_sum=cycle_data.work_time_sum)
cycle_claimed = get_cycle_claimed(begin_date, end_date)
for claimed in cycle_claimed:
SummaryBomber.update(claimed_cnt=claimed[1]).where(SummaryBomber.
time == begin_date, SummaryBomber.cycle == claimed[0],
SummaryBomber.bomber_id == claimed[0]).execute()
cycle_new_case(begin_date, end_date)
get_cycle_new_case_call(begin_date, end_date)
get_cycle_new_case_cleared(begin_date, end_date)
get_cycle_case_made_cnt(begin_date, end_date)
all_datas = SummaryBomber.filter(SummaryBomber.time == begin_date)
for data in all_datas:
cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum if
data.new_case_amount_sum else 0) * 100
data.new_case_cleard_rate = cl_rat
case_made_rate = (data.case_made_cnt / data.claimed_cnt if data.
claimed_cnt else 0) * 100
data.case_made_rate = case_made_rate
case_connect_rate = (data.case_connect_cnt / data.case_made_cnt if
data.case_made_cnt else 0) * 100
data.case_connect_rate = case_connect_rate
calltime_case_avg = (data.calltime_case_sum / data.
calltime_case_cnt if data.calltime_case_cnt else 0)
data.calltime_case_avg = calltime_case_avg
calltime_no_case_avg = (data.calltime_no_case_sum / data.
calltime_no_case_cnt if data.calltime_no_case_cnt else 0)
data.calltime_no_case_avg = calltime_no_case_avg
KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt if data.
KP_today_cnt else 0) * 100
data.KP_cleared_rate = KP_cleared_rate
data.save()
def get_change_bomber():
    cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value,
                      8: Cycle.C3.value}
result = {}
bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,
BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=
BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.
created_at) == date.today(), BomberLog.role_id << list(
cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.
instalment == 0).dicts()
for b_log in bomber_logs:
cycle = cycle_role_map.get(b_log['role_id'])
group_id = b_log['group_id']
if cycle in result:
if group_id not in result[cycle]:
result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],
'new_ids': []}
else:
result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],
'new_ids': []}}
if b_log['operation'] == 0:
result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])
if result:
bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map
.keys())), Bomber.is_del == 0, Bomber.instalment == 0)
for b in bombers:
cycle_result = result.get(cycle_role_map[b.role_id], {})
role_result = cycle_result.get(b.group_id)
if not role_result:
continue
role_result['new_ids'].append(b.id)
        result_list = []
        for cycle, group_dict in result.items():
            result_list.extend(list(group_dict.values()))
        return result_list
return []
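# Illustrative note (not part of the original module): get_change_bomber
# returns a flat list of the per-group dicts built above, e.g. one removed
# bomber (id 7) and one active bomber (id 9) in the same C2 group yields:
#
#     [{'cycle': 2, 'del_ids': [7], 'new_ids': [9]}]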
def get_surplus_application(new_ids, del_ids, average_nums, classified_apps):
surplus_apps = []
for del_id in del_ids:
del_res = classified_apps.get(del_id, {})
p_list = del_res.get('p_list', [])
np_list = del_res.get('np_list', [])
del_res['need_num'] = -(len(p_list) + len(np_list))
del_res['to_list'] = np_list + p_list
surplus_apps.extend(p_list)
surplus_apps.extend(np_list)
for index, bid in enumerate(new_ids):
average = average_nums[index]
bomber_app = classified_apps.get(bid)
if not bomber_app:
bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid
).first()
bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':
[], 'np_num': 0, 'need_num': average, 'partner_id': bomber.
partner_id if bomber else ''}
classified_apps[bid] = bomber_app
else:
p_num = len(bomber_app['p_list'])
np_num = len(bomber_app['np_list'])
if p_num > average:
bomber_app['need_num'] = -np_num
else:
bomber_app['need_num'] = average - (p_num + np_num)
bomber_app['p_num'] = p_num
bomber_app['np_num'] = np_num
if bomber_app['need_num'] < 0:
random.shuffle(bomber_app['np_list'])
res_over = bomber_app['np_list'][:-bomber_app['need_num']]
bomber_app['to_list'] = res_over
surplus_apps.extend(res_over)
classified_apps_list = sorted(classified_apps.values(), key=lambda x: x
['need_num'], reverse=True)
return surplus_apps, classified_apps_list
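# Illustrative sketch (standalone, simplified): need_num is the gap between a
# bomber's target load and what it already holds; a negative value means the
# bomber must give back that many non-promised cases, as computed above.
def _example_need_num(average, p_num, np_num):
    if p_num > average:
        return -np_num
    return average - (p_num + np_num)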
@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)
def bomber_dispatch_applications(payload, msg_id):
change_bombers = get_change_bomber()
instalment_change_bombers = get_instalment_change_bomber()
params = {ApplicationType.CASH_LOAN.value: change_bombers,
ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}
for type, bombers in params.items():
change_bomber_dispatch_apps(change_bombers=bombers, type=type)
@action(MessageAction.REPAIR_BOMBER)
def repair_bomber(payload, msg_id):
app_mobile = payload['app_mobile']
username = payload.get('user_name')
logging.info('start repair bomber, number: %s' % app_mobile)
if 'mobile_no' in payload and payload['mobile_no']:
mobile = number_strip(str(payload['mobile_no']))[:64]
name = payload.get('mobile_name')
application = Application.filter(Application.user_mobile_no == mobile)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, mobile, username, name)
if 'tel_no' in payload and payload['tel_no']:
tel_no = number_strip(str(payload['tel_no']))[:64]
name = payload.get('tel_name')
application = Application.filter(Application.user_mobile_no == tel_no)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, tel_no, username, name)
def get_summary_daily_time():
mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')
mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')
now_date = datetime.now()
now_date_time = now_date.time()
today_str = str(now_date.date())
if now_date_time < mid_time_t1.time():
yes_date = now_date - timedelta(days=1)
yes_date_str = str(yes_date.date())
begin_str = yes_date_str + ' 17:20:00'
end_str = today_str + ' 00:00:00'
elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():
begin_str = today_str + ' 00:00:00'
end_str = today_str + ' 12:40:00'
else:
begin_str = today_str + ' 12:40:00'
end_str = today_str + ' 17:20:00'
begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')
end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')
summary_datetime = now_date - timedelta(minutes=30)
summary_date = summary_datetime.date()
return begin_time, end_time, summary_date
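# Illustrative sketch (not part of the original module): the day is split into
# three summary windows at 12:40 and 17:20; this mirrors the branches above by
# labelling which window a wall-clock time falls into.
def _example_summary_window(now_time):
    from datetime import time as _time  # local import, kept self-contained
    if now_time < _time(12, 40):
        return 'yesterday 17:20 -> today 00:00'
    if now_time < _time(17, 20):
        return 'today 00:00 -> 12:40'
    return 'today 12:40 -> 17:20'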
@action(MessageAction.SUMMARY_DAILY)
def summary_daily_data(payload, msg_id):
begin_time, end_time, summary_date = get_summary_daily_time()
call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.
bomber_id, CallActionsR.application_id, CallActionsR.promised_date,
CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(
CallActionsR.created_at >= begin_time, CallActionsR.created_at <
end_time, CallActionsR.type << (0, 1))
summary_dailys = {}
for call in call_actions:
if call.bomber_id not in summary_dailys:
summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.
bomber_id, 'summary_date': str(summary_date)}
if call.name and call.number:
summary_dailys[call.bomber_id]['call_cnt'] += 1
if call.promised_date:
summary_dailys[call.bomber_id]['ptp_cnt'] += 1
C1_sql = (
"""
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,a.cycle
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,2,4,5)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
"""
% (begin_time, end_time))
C1_repayment = run_all_sql(C1_sql)
other_sql = (
"""
select current_bomber_id,sum(pay_amount) as pay_amount,cycle
from (
select application_id,current_bomber_id,pay_amount,repay_at,cycle
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at,
br.cycle
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (3,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
"""
% (begin_time, end_time))
other_repayment = run_all_sql(other_sql)
all_repayment = C1_repayment + other_repayment
for res in all_repayment:
bomber_id, pay_amount, cycle = res
if bomber_id in summary_dailys:
summary_dailys[bomber_id]['repayment'] += pay_amount
else:
summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': cycle, 'repayment': pay_amount, 'bomber_id':
bomber_id, 'summary_date': str(summary_date)}
insert_values = list(summary_dailys.values())
if insert_values:
SummaryDaily.insert_many(insert_values).execute()
def get_app_logs(apps):
app_logs = {}
all_np_apps = []
all_p_apps = []
for a in apps:
latest_bomber = a['latest_bomber']
latest_bomber = a['cycle'] if not latest_bomber else latest_bomber
if latest_bomber in app_logs:
app_logs[latest_bomber]['to_ids'].append(a['id'])
else:
app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':
[a['id']], 'np_ids': [], 'p_ids': []}
if a['promised_date'] and a['promised_date'].date() >= datetime.now(
).date():
app_logs[latest_bomber]['p_ids'].append(a['id'])
all_p_apps.append(a)
else:
app_logs[latest_bomber]['np_ids'].append(a['id'])
all_np_apps.append(a)
return app_logs, all_np_apps, all_p_apps
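# Illustrative sketch (assumption: apps are plain dicts with 'id',
# 'latest_bomber', 'cycle' and 'promised_date' keys): apps fall back to their
# cycle as the log key when no bomber holds them, and split into promised (p)
# vs non-promised (np) buckets by date, mirroring get_app_logs above.
def _example_group_apps(apps, today):
    logs = {}
    for a in apps:
        key = a['latest_bomber'] or a['cycle']
        bucket = logs.setdefault(key, {'p_ids': [], 'np_ids': []})
        if a['promised_date'] and a['promised_date'].date() >= today:
            bucket['p_ids'].append(a['id'])
        else:
            bucket['np_ids'].append(a['id'])
    return logs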
def month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):
apps = list(apps)
np_apps = list(np_apps)
random.shuffle(np_apps)
apps_len = len(apps)
np_apps_len = len(np_apps)
end = 0
all_app_precentage = 0
partners = Partner.select().where(Partner.cycle == cycle, Partner.
status == PartnerStatus.NORMAL.value)
for p in partners:
all_app_precentage += p.app_percentage
for partner in partners:
bombers = Bomber.select().where(Bomber.partner == partner.id,
Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.
value)
bids = {b.id: b for b in bombers}
if len(bids) == 0:
logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))
continue
start = end
if np_apps_len >= int(apps_len * all_app_precentage):
end = start + int(apps_len * partner.app_percentage)
else:
end = start + int(np_apps_len * partner.app_percentage /
all_app_precentage)
partner_app = np_apps[start:end]
dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)
np_apps = np_apps[end:]
return np_apps
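# Illustrative sketch (standalone, hypothetical percentages): each outside
# partner takes a contiguous slice of the shuffled pool sized by its
# app_percentage, scaled down proportionally when the pool is smaller than the
# combined quota -- the same slicing arithmetic as above.
def _example_partner_slices(total_len, pool_len, percentages):
    total_pct = sum(percentages)
    end, slices = 0, []
    for pct in percentages:
        start = end
        if pool_len >= int(total_len * total_pct):
            end = start + int(total_len * pct)
        else:
            end = start + int(pool_len * pct / total_pct)
        slices.append((start, end))
    return slices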
def month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):
    sys_cycle = {1: 'AB_TEST_C1A', 2: 'AB_TEST_C1B', 3: 'AB_TEST_C2',
                 4: 'AB_TEST_C3'}
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0
)
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
bombers = bombers.where(Bomber.instalment == 0)
bids = {b.id: b for b in bombers}
if cycle == Cycle.C1A.value:
np_ids = [a['id'] for a in np_apps]
np = Application.update(status=ApplicationStatus.PROCESSING.value,
ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids
).execute()
bomber_app_logs = app_logs.get(cycle, {})
out_param = {'application_ids': bomber_app_logs.get('to_ids', []),
'month_dispatch': 1, 'src_bomber_id': cycle}
new_out_record(**out_param)
in_param = {'cycle': cycle, 'application_ids': np_ids,
'dest_bomber_id': cycle}
new_in_record(**in_param)
bomber_app_logs['need_num'] = len(np_apps)
bomber_app_logs['form_ids'] = np_ids
bomber_app_logs['status'] = 1
else:
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)
dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)
def dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,
type=ApplicationType.CASH_LOAN.value):
apps = list(apps)
random.shuffle(apps)
bids_list = list(bids.keys())
if len(bids_list) <= 0:
logging.info('get_dispatch_app_to_bomber no bids')
return
average_num = get_average_number(len(apps), len(bids_list))
bomber_end = 0
with db.atomic():
for index, bid in enumerate(bids_list):
current_bomber = bids.get(bid)
bomber_app_logs = app_logs.get(bid, {})
bomber_start = bomber_end
bomber_end = bomber_start + average_num[index]
bomber_apps = apps[bomber_start:bomber_end]
from_p, from_np, from_ids, status = [], [], [], 0
for ba in bomber_apps:
promised_date = ba.get('promised_date')
from_ids.append(ba['id'])
if promised_date and promised_date.date() >= date.today():
from_p.append(ba['id'])
else:
from_np.append(ba['id'])
app_status = ApplicationStatus.AB_TEST.value
if (cycle == Cycle.C1A.value and not out_partner and type ==
ApplicationType.CASH_LOAN.value):
app_status = ApplicationStatus.PROCESSING.value
if from_p:
p = Application.update(ptp_bomber=bid, latest_bomber=bid,
status=app_status).where(Application.id << from_p).execute(
)
p_ids = bomber_app_logs.get('p_ids', []) + from_p
bomber_app_logs['p_ids'] = p_ids
if from_np:
np = Application.update(latest_bomber=bid, ptp_bomber=None,
status=ApplicationStatus.AB_TEST.value).where(
Application.id << from_np).execute()
np_ids = bomber_app_logs.get('np_ids', []) + from_np
bomber_app_logs['np_ids'] = np_ids
in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.
partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}
if type == ApplicationType.CASH_LOAN.value:
out_param = {'src_bomber_id': bid, 'application_ids':
bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}
new_out_record(**out_param)
new_in_record(**in_param)
else:
out_and_in_record_instalment(**in_param)
bomber_app_logs['status'] = 1
need_num = bomber_app_logs.get('need_num', 0) + average_num[index]
bomber_app_logs['need_num'] = need_num
all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids
bomber_app_logs['form_ids'] = all_form_ids
if not out_partner:
continue
try:
DispatchApp.delete().where(DispatchApp.application.in_(
from_ids)).execute()
dispatch_ins = [{'application': id, 'partner':
current_bomber.partner_id, 'bomber': bid, 'status':
DisAppStatus.NORMAL.value} for id in from_ids]
DispatchApp.insert_many(dispatch_ins).execute()
except Exception as e:
logging.info(
'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str
(e), bid, from_ids))
def calc_entry_time(overdue_days):
app_entry_time = {}
overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],
'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}
for key, value in overdue_entry.items():
if value[0] <= overdue_days <= value[1]:
app_entry_time[key] = datetime.now()
else:
app_entry_time[key] = None
return app_entry_time
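# Illustrative usage (not part of the original flow): each overdue_days bucket
# stamps exactly one entry stage and leaves the rest None, e.g.:
#
#     calc_entry_time(15)
#     # -> {'dpd1_entry': None, 'C1A_entry': None,
#     #     'C1B_entry': datetime.now(), 'C2_entry': None, 'C3_entry': None}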
@action(MessageAction.SUMMARY_BOMBER_OVERDUE)
def summary_bomber_overdue_everyday(payload, msg_id):
cycle_list = Cycle.values()
which_day = date.today()
for cycle in cycle_list:
apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,
ApplicationR.ptp_bomber, ApplicationR.overdue_days,
ApplicationR.promised_date, ApplicationR.follow_up_date,
ApplicationR.external_id, OverdueBillR.status, OverdueBillR.
periods, OverdueBillR.sub_bill_id).join(OverdueBillR,
JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id
).where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()
bomber_overdue_list = []
for app in apps:
status = app.get('status')
if status == ApplicationStatus.REPAID.value:
continue
ptp_bomber = app.get('ptp_bomber')
promised_date = app.get('promised_date')
follow_up_date = app.get('follow_up_date')
if not promised_date or promised_date.date() < date.today():
ptp_bomber = promised_date = None
if not follow_up_date or follow_up_date.date() < date.today():
follow_up_date = None
overdue_dict = {'collection_id': app.get('id'), 'external_id':
app.get('external_id'), 'sub_bill_id': app.get(
'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app
.get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':
ptp_bomber, 'promised_date': promised_date,
'follow_up_date': follow_up_date, 'which_day': which_day,
'overdue_days': app.get('overdue_days')}
bomber_overdue_list.append(overdue_dict)
try:
if bomber_overdue_list:
with db.atomic():
for index in range(0, len(bomber_overdue_list), 1000):
insert_list = bomber_overdue_list[index:index + 1000]
BomberOverdue.insert_many(insert_list).execute()
except Exception as e:
logging.info(
'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %
(cycle, str(which_day), str(e)))
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)
def today_ptp_auto_call_switch(payload, msg_id):
today = datetime.today().date()
next_day = today + timedelta(days=1)
apps = ApplicationR.select(ApplicationR.latest_bomber).where(
ApplicationR.promised_date < next_day, ApplicationR.promised_date >=
today, ApplicationR.promised_date.is_null(False), ApplicationR.
status != ApplicationStatus.REPAID.value, ApplicationR.cycle <
Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(
ApplicationR.latest_bomber)
bids = [a.latest_bomber_id for a in apps]
if not bids:
return
q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(
BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids
).execute()
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)
def update_today_switch_every_day(payload, msg_id):
q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(
BomberPtp.auto_ext.is_null(False)).execute()
@action(MessageAction.BOMBER_HEALTH_CHECK)
def health_check(payload, msg_id):
pass
def dpd1_process(lst):
"""已废弃的方法"""
if not lst:
return
for key, l in lst.items():
rule = getattr(BeforeInBomber, key).value
query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)
).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(
rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.
call_success()))
success_set = {i.loanid for i in query}
failed_list = list(set(l) - success_set)
post_params = {'$and': rule.get('$and'), 'app_list': failed_list}
resp = Hyperloop().post('/bomber/score/verify', json=post_params)
if not resp.ok:
logging.error('hyperloop score verification failed: %s, %s',
str(resp.status_code), str(resp.text))
logging.error('hyperloop score verification failed: %s', str(
post_params))
continue
logging.debug('hyperloop score verification success: %s', resp.content)
resp_json = resp.json()
app_list = resp_json['data']
if not app_list:
continue
for item in app_list:
if random.randint(0, 5) == 1:
send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':
int(item)})
@action(MessageAction.GET_IVR)
def get_ivr(payload, msg_id):
logging.warning('start get_ivr')
sys_config = SystemConfig.select().where(SystemConfig.key ==
'DPD1-3_INTO_IVR').first()
now = date.today()
if sys_config and sys_config.value:
start = now - timedelta(days=3)
else:
start = now
end = now + timedelta(days=4)
item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()
if not item:
AutoIVR.delete().execute()
current_page = 0
elif item.current_page >= item.total_page:
return
else:
current_page = item.current_page
auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,
'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,
'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':
10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,
'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,
'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,
'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,
'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,
'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,
'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,
'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,
'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,
'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,
'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':
101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,
'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,
'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,
'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}
current_page += 1
with db.atomic() as transaction:
while True:
bill_service = BillService()
ivr_action = bill_service.ivr_pages(page=current_page,
page_size=500, start_time=utc_datetime(str(start)),
end_time=utc_datetime(str(end)))
result = ivr_action['result']
page_size = int(ivr_action.get('page_size', 0))
total_page = int(ivr_action.get('total_page', 0))
insert_args = []
for a in result:
due_at = no_utc_datetime(a['due_at'])
days = (due_at.date() - now).days
if days == 2:
continue
if days > 0:
time = str(days)
else:
time = str(days).replace('-', 'PDP')
key = a['app_name'] + str(a['su']) + time
group = auto_ivr.get(key)
user_id = a['user_id']
try:
user_resp = AccountService().get_user(path_params={
'user_id': user_id})
if str(user_resp['mobile_no']) == str(a['user_mobile_no']):
numbers = a['user_mobile_no']
else:
numbers = a['user_mobile_no'] + ',' + user_resp.get(
'mobile_no')
                except Exception:
logging.error('Request Account Service Error.')
numbers = a['user_mobile_no']
insert_args.append({'application_id': a['id'], 'numbers':
numbers, 'group': group, 'user_id': user_id})
AutoIVR.insert_many(insert_args).execute()
if current_page == 1:
IVRActionLog.create(total_page=total_page, proc_date=now,
page_size=page_size, current_page=current_page)
item = IVRActionLog.get(IVRActionLog.proc_date == now)
else:
item.current_page = current_page
item.page_size = page_size
item.total_page = total_page
item.save()
transaction.commit()
current_page += 1
if current_page > int(total_page):
break
if sys_config and sys_config.value:
try:
classfiy_dpd_ptp_apps()
except Exception as e:
logging.error('dpd1-3_test_error:%s' % str(e))
def ivr_t2_test():
t2_groups = [39, 40, 41, 42, 43, 44]
ivr_test_proportion = 0.2
sys_config = SystemConfig.select().where(SystemConfig.key ==
'IVR_TEST_PROPORTION').first()
if sys_config and sys_config.value:
ivr_test_proportion = float(sys_config.value)
t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.
status == AutoIVRStatus.AVAILABLE.value)
t2_dict = defaultdict(list)
for ivr in t2_ivrs:
t2_dict[ivr.group].append(ivr.id)
test_ivr_ids = []
for group, ivr_ids in t2_dict.items():
number = ceil(len(ivr_ids) * ivr_test_proportion)
test_ivr_ids += ivr_ids[:number]
if not test_ivr_ids:
return
q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.
group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()
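# Illustrative sketch (standalone): the T2 A/B split above keeps
# ceil(n * proportion) ids per group in the test arm and marks the rest as
# handled; a minimal version of that per-group selection:
def _example_test_split(grouped_ids, proportion):
    from math import ceil  # ceil is already imported at module level
    test_ids = []
    for ids in grouped_ids.values():
        test_ids += ids[:ceil(len(ids) * proportion)]
    return test_ids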
@action(MessageAction.APP_MERGE)
@deprecated(version='1.0', reason='This function will be removed soon')
def app_merge(payload, msg_id):
sql = """
select *
from (
select a.id as id
from dashboard.application as a
inner join repayment.bill2 as b on b.external_id = a.id
where not exists (
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.app = 'DanaCepat'
and a.is_first_loan = 1
and a.apply_at < '2018-08-23 20:50:00'
and b.overdue_days between 1 and 3
and b.status != 2) result
where not exists (
select 1
from bomber.application as a
where a.cycle = 1
and a.status = 4
and a.id = result.id
)
"""
cursor = readonly_db.get_cursor()
cursor.execute(sql)
new_data = cursor.fetchall()
cursor.close()
if new_data:
bomber = [103, 104]
for d in new_data:
app_id = {'id': d[0]}
application_overdue(app_id, None)
Application.update(status=ApplicationStatus.AB_TEST.value,
latest_bomber=random.choice(bomber), ptp_bomber=None).where(
Application.id == d[0]).execute()
logging.warning('add new app success')
ptp = date.today() - timedelta(days=1)
del_sql = (
"""
select a.id
from bomber.application as a
where exists(
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.cycle = 1
and a.status = 4
and (a.promised_date is null or a.promised_date < "%s")
"""
% ptp)
cursor = readonly_db.get_cursor()
cursor.execute(del_sql)
del_date = cursor.fetchall()
cursor.close()
    if not del_date:
        return
ids = list()
for d in del_date:
ids.append(d[0])
Application.update(status=ApplicationStatus.UNCLAIMED.value,
latest_bomber=None).where(Application.id << ids).execute()
@action(MessageAction.APPLICATION_BOMBER)
def application_overdue(payload, msg_id):
application_id = payload['id']
sub_bill_id = payload['bill_sub_id']
local_app = Application.select().where(Application.external_id ==
application_id).order_by(Application.finished_at).first()
if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:
logging.info('application %s overdue, already exists', application_id)
add_contact(local_app)
return
if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:
overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==
sub_bill_id, OverdueBillR.external_id == application_id)
if overdue_bill.exists():
logging.info(
'application %s,sub_bill_id %s overdue, already exists' % (
application_id, sub_bill_id))
return
try:
sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
sub_bill = sub_bill[0]
except Exception:
logging.error(
'application %s overdue, get sub_bill info failed:Request To repayment Error'
, application_id)
return
if sub_bill['status'] == 2:
logging.error('application %s overdue, but bills already cleared',
application_id)
return
overdue_days = sub_bill.get('overdue_days', 0)
if overdue_days == 0:
logging.info('application {} no overdue'.format(str(application_id)))
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
logging.error('get application %s failed: Request to GoldenEye.',
application_id)
return
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
apply_history = Dashboard().get('/users/%s/apply-history' % user_id)
if not apply_history.ok:
logging.error(
'get user %s apply history failed: Request to Dashboard Failed.',
user_id)
return
history = apply_history.json().get('data')
loan_success_times = len([(1) for i in history if i['status'] in [80,
90, 100, 70] and i['id'] != gold_app['id']])
id = application_id
type = ApplicationType.CASH_LOAN.value
bill_id = sub_bill.get('bill_id')
amount = sub_bill.get('amount')
amount_net = sub_bill.get('amount_net')
interest_rate = sub_bill.get('interest_rate')
overdue_days = sub_bill.get('overdue_days')
origin_due_at = sub_bill.get('origin_due_at')
sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,
'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),
'overdue_days': overdue_days, 'origin_due_at': origin_due_at,
'amount': amount, 'amount_net': amount_net, 'interest_rate':
interest_rate, 'external_id': application_id}
if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:
if local_app and local_app.status != ApplicationStatus.REPAID.value:
sub_overdue_bill['collection_id'] = local_app.id
local_app.amount += amount
local_app.amount_net += amount_net
local_app.save()
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('application %s,sub_bill_id:%s overdue created' %
(application_id, sub_bill_id))
return
else:
id = idg()
type = ApplicationType.CASH_LOAN_STAGING.value
sub_overdue_bill['collection_id'] = id
ptp_info = BombingHistory.filter(BombingHistory.application == id).first()
promised_amount = ptp_info and ptp_info.promised_amount
promised_date = ptp_info and ptp_info.promised_date
application = Application.create(id=id, user_id=gold_app['user_id'],
user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[
'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],
contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(
'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=
birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(
gold_app.get('id_ektp')), profile_province=(gold_app.get(
'profile_province') or {}).get('name'), profile_city=(gold_app.get(
'profile_city') or {}).get('name'), profile_district=(gold_app.get(
'profile_district') or {}).get('name'), profile_residence_time=
gold_app.get('profile_residence_time'), profile_residence_type=
gold_app.get('profile_residence_type'), profile_address=gold_app.
get('profile_address'), profile_education=gold_app.get(
'profile_education'), profile_college=(gold_app.get(
'profile_college') or {}).get('name'), job_name=gold_app.get(
'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get
('job_bpjs'), job_user_email=gold_app.get('job_user_email'),
job_type=gold_app.get('job_type'), job_industry=gold_app.get(
'job_industry'), job_department=gold_app.get('job_department'),
job_province=(gold_app.get('job_province') or {}).get('name'),
job_city=(gold_app.get('job_city') or {}).get('name'), job_district
=(gold_app.get('job_district') or {}).get('name'), job_address=
gold_app.get('job_address'), amount=amount, amount_net=amount_net,
interest_rate=interest_rate, term=gold_app.get('term'),
origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=
sub_bill.get('repay_at'), loan_success_times=loan_success_times,
arrived_at=datetime.now(), follow_up_date=datetime.now(),
promised_amount=promised_amount, promised_date=promised_date,
external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=
datetime.now())
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('overdue application %s created', application_id)
Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)
add_contact(application)
def add_contact(application):
logging.info('start add contact for application: %s', application.id)
contacts = Contact.filter(Contact.user_id == application.user_id)
existing_numbers = {contact.number for contact in contacts}
insert_contacts = list()
mon_insert_contact = {}
user_mobile_no = number_strip(application.user_mobile_no)
if user_mobile_no and user_mobile_no not in existing_numbers:
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': user_mobile_no, 'relationship':
Relationship.APPLICANT.value, 'source': 'apply info',
'real_relationship': Relationship.APPLICANT.value})
existing_numbers.add(number_strip(application.user_mobile_no))
extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id
)
if not extra_phone.ok:
extra_phone = []
logging.error('get user %s extra contacts failed', application.user_id)
else:
extra_phone = extra_phone.json()['data']
if extra_phone:
for i in extra_phone:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': number, 'relationship':
Relationship.APPLICANT.value, 'source': 'extra phone',
'real_relationship': Relationship.APPLICANT.value})
key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
ec_contact = []
contact = json.loads(application.contact or '[]')
for i in contact:
if number_strip(i['mobile_no']
) not in existing_numbers and number_strip(i['mobile_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['mobile_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['mobile_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['mobile_no']))
if i['type'] != 1:
continue
if number_strip(i['tel_no']) not in existing_numbers and number_strip(i
['tel_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['tel_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['tel_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['tel_no']))
if ec_contact:
Contact.insert_many(ec_contact).execute()
if all((application.job_tel, number_strip(application.job_tel),
number_strip(application.job_tel) not in existing_numbers)):
insert_contacts.append({'user_id': application.user_id, 'name':
None, 'number': number_strip(application.job_tel),
'relationship': Relationship.COMPANY.value, 'source':
'basic info job_tel', 'real_relationship': Relationship.COMPANY
.value})
key = user_mobile_no, number_strip(application.job_tel
), ContactType.C_BASIC_INFO_JOB_TEL.value
mon_insert_contact[key] = 1, 0, None
existing_numbers.add(number_strip(application.job_tel))
sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %
application.external_id)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id
)
else:
sms_contacts = sms_contacts.json()['data']
if sms_contacts:
for i in sms_contacts:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.SUGGESTED.value, 'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value})
key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value
mon_insert_contact[key] = 1, 0, i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
cf = GoldenEye().get('/applications/%s/call/frequency' % application.
external_id)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
insert_contacts = []
fm = GoldenEye().get('/applications/%s/contact/family-member' %
application.external_id)
if not fm.ok:
family = []
logging.error('get application %s family-member info error',
application.external_id)
else:
family = fm.json()['data']
if family:
for i in family:
if not i.get('number'):
logging.info('family member %s' % str(i))
continue
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
logging.info('family members: %s' % str(i))
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.FAMILY.value, 'source': FamilyContactType.
CALLEC.value, 'total_count': i.get('total_count', 1),
'total_duration': i.get('total_duration', 0),
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number, ContactType.F_CALL_EC.value
mon_insert_contact[key] = i.get('total_count', 1), i.get(
'total_duration', 0), i['name'][:128]
existing_numbers.add(number)
mon_update_contact = {}
if call_frequency:
with db.atomic():
count = 1
for i in call_frequency:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
                    Contact.update(total_count=i['total_count'],
                        total_duration=i['total_duration']).where(
                        Contact.number == number,
                        Contact.user_id == application.user_id).execute()
key = user_mobile_no, number
mon_update_contact[key] = i['total_count'], i[
'total_duration']
continue
if count < 6:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.FAMILY.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': FamilyContactType.
CALLTOP5.value, 'real_relationship': Relationship.
FAMILY.value})
count += 1
key = user_mobile_no, number, ContactType.F_CALL_TOP5.value
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
else:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value})
key = (user_mobile_no, number, ContactType.
S_CALL_FREQUENCY.value)
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
next_apply_list = AccountService().add_contact(application.user_id)
for next_apply in next_apply_list:
number = number_strip(str(next_apply))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
SUGGESTED.value, source='online profile phone',
real_relationship=Relationship.SUGGESTED.value)
key = (user_mobile_no, number, ContactType.
S_ONLINE_PROFILE_PHONE.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %
application.user_id)
if not next_applicant.ok:
next_applicant = []
logging.error('get user %s dual_contact contacts failed' %
application.user_id)
else:
next_applicant = next_applicant.json()['data']
if next_applicant:
for i in next_applicant:
number = number_strip(str(i))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=
application.user_name, number=number, relationship=
Relationship.APPLICANT.value, source='apply info',
real_relationship=Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_APPLY_INFO.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
numbers = []
try:
numbers = AccountService().ktp_number(path_params={'user_id':
application.user_id})
except Exception as e:
logging.info('request ktp numbers failed %s' % str(e))
for n in numbers:
number = number_strip(str(n))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
APPLICANT.value, source='ktp number', real_relationship=
Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
try:
ecs = GoldenEye().get('/applications/%s/contact/ec' % application.
external_id)
    except Exception as e:
        ecs = None
        logging.info('request ec-member error: %s' % str(e))
    try:
        if not ecs or not ecs.ok:
ec = []
logging.info('get application %s ec-member info error',
application.external_id)
else:
ec = ecs.json()['data']
if ec:
for e in ec:
number = number_strip(e['numbers'])[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=e[
'name'][:128], number=number, relationship=
Relationship.FAMILY.value, source=FamilyContactType
.CONTACTEC.value, real_relationship=Relationship.
FAMILY.value)
key = (user_mobile_no, number, ContactType.F_CONTACT_EC
.value)
mon_insert_contact[key] = 1, 0, e['name'][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add ec_member error:%s' % str(e))
try:
mn = GoldenEye().get('/applications/%s/contact/my_number' %
application.external_id)
    except Exception as e:
        mn = None
        logging.info('request my_number error: %s' % str(e))
    try:
        if not mn or not mn.ok:
my = []
logging.info('get application %s my_number info error',
application.external_id)
else:
my = mn.json()['data']
if my:
for m in my:
number = number_strip(m)[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=my[m][
:128], number=number, relationship=Relationship.
SUGGESTED.value, source='my number',
real_relationship=Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_MY_NUMBER.value
mon_insert_contact[key] = 1, 0, my[m][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add my_member error:%s' % str(e))
try:
cn = GoldenEye().get('/applications/%s/contact/company-number' %
application.external_id)
    except Exception as e:
        cn = None
        logging.info('request company-number error: %s' % str(e))
    try:
        if not cn or not cn.ok:
cn = []
logging.info('get application %s company_number info error',
application.external_id)
else:
cn = cn.json()['data']
if cn:
for c in cn:
number = c
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=cn[c][
:128], number=number, relationship=Relationship.
COMPANY.value, source='company', real_relationship=
Relationship.COMPANY.value)
key = user_mobile_no, number, ContactType.C_COMPANY.value
mon_insert_contact[key] = 1, 0, cn[c][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add company_member error:%s' % str(e))
try:
ol = AccountService().other_login_contact(userId=application.user_id)
except Exception as e:
logging.error('request other_login error: %s' % e)
ol = {}
try:
for o in ol:
number = number_strip(o)
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=ol[o][:128
], number=number, relationship=Relationship.SUGGESTED.
value, source='other_login', real_relationship=
Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value
mon_insert_contact[key] = 1, 0, ol[o][:128]
except Exception as e:
logging.error('add other_login number error:%s' % e)
logging.info('add contact for application %s finished', application.id)
if mon_insert_contact or mon_update_contact:
send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {
'user_mobile_no': user_mobile_no, 'insert_contact': str(
mon_insert_contact), 'update_contact': str(mon_update_contact),
'user_id': application.user_id, 'name': application.user_name})
def get_contact_from_mongo(number):
if not number:
return []
query = TotalContact.objects(src_number=number, source__in=TotalContact
.available()).order_by('source')
lst = []
for c in query:
relation = TotalContact.relationship(c.source)
if relation == -1:
continue
source = TotalContact.str_source(c.source)
if not source:
continue
lst.append({'related_number': c.dest_number, 'source': source,
'is_calc': c.is_calc, 'total_count': c.total_count,
'total_duration': c.total_duration, 'relation': relation,
'name': c.dest_name})
return lst
def check_key_not_none(payload, keys):
for key in keys:
if payload.get(key) is None:
logging.error('Missing args {}'.format(str(key)))
return False
return True
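# Illustrative usage (not part of the original module): callers pass the raw
# message payload plus the required key names and bail out early on False,
# e.g.:
#
#     check_key_not_none({'external_id': 1, 'paid_at': None},
#                        ['external_id', 'paid_at'])
#     # -> False, because 'paid_at' is present but None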
@action(MessageAction.BILL_PAID)
def bill_paid(payload, msg_id):
validate = check_key_not_none(payload, ['external_id', 'late_fee_part',
'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])
if not validate:
logging.error('payload key not fully pass in.')
return
external_id = payload['external_id']
late_fee_part = Decimal(payload['late_fee_part'])
principal_part = Decimal(payload['principal_part'])
paid_at = payload['paid_at']
partner_bill_id = payload['partner_bill_id']
logging.debug(
'application %s paid principal part %s, paid late fee part %s',
external_id, principal_part, late_fee_part)
application = Application.filter(Application.external_id == external_id
).order_by(-Application.created_at).first()
if not application:
logging.info('application %s paid, not found application', external_id)
return
sub_bill_id = payload['bill_sub_id']
overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==
application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()
if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not
overdue_bill):
logging.info('bill sub not in bomber %s', sub_bill_id)
return
with db.atomic():
repay_at = str_no_utc_datetime(payload['latest_repay_at'])
Application.update(repay_at=repay_at).where(Application.id ==
application.id).execute()
RepaymentLog.create(application=application.id, is_bombed=True,
current_bomber=application.latest_bomber_id, cycle=application.
cycle, principal_part=principal_part, late_fee_part=
late_fee_part, repay_at=paid_at, ptp_bomber=application.
ptp_bomber, latest_call=application.latest_call, periods=
overdue_bill.periods if overdue_bill else None, overdue_bill_id
=overdue_bill.id if overdue_bill else None, partner_bill_id=
partner_bill_id)
phone_status = PhoneStatus.CONNECTED.value
real_relationship = RealRelationship.user_values()
commit = CallActionCommit.NO.value
number = CallActions.select(CallActions.number).where(CallActions.
phone_status == phone_status, CallActions.real_relationship <<
real_relationship, CallActions.commit == commit, CallActions.
application == application.id).order_by(-CallActions.created_at
).first()
if number:
Contact.update(call_priority=PriorityStatus.REPAY.value).where(
Contact.user_id == application.user_id, Contact.
call_priority == PriorityStatus.LAST.value).execute()
Contact.update(call_priority=PriorityStatus.LAST.value).where(
Contact.user_id == application.user_id, Contact.number ==
number.number).execute()
if not application.latest_bomber_id:
return
Inbox.create(title='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id), content=
'application %s,sub_bill_id %s repaid' % (application.
external_id, sub_bill_id), receiver=application.
latest_bomber_id or application.last_bomber_id, category=
InboxCategory.REPAID.value)
@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
"""已废弃"""
bill = payload['head_bill']
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_row = Application.update(repay_at=repay_at).where(Application.
id == bill['external_id']).execute()
logging.info('application %s bill relief done', bill['external_id'])
return updated_row
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query = Application.update(overdue_days=overdue_days).where(Application
.status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.
overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)
updated_rows_count = query.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count)
try:
calc_overdue_days_over_instalment()
except Exception as e:
logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))
apps = Application.filter(Application.status << [ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value], Application.overdue_days > 95,
Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
def calc_overdue_days_over_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days > 95)
updated_rows_count = query.execute()
logging.info(
'calc_overdue_days_over_instalment done,count:%s,status:%s' % (
updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.collection_id,
OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on
=OverdueBill.collection_id == Application.id).where(Application
.status == status, Application.type == ApplicationType.
CASH_LOAN_STAGING.value)
app_update = {}
for ob in overdue_bills:
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)
def calc_overdue_days(payload, msg_id):
"""
    Called by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query_unclaimed = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.UNCLAIMED.value,
Application.overdue_days <= 95, Application.type == ApplicationType
.CASH_LOAN.value)
updated_rows_count_unclaimed = query_unclaimed.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_unclaimed)
query_processing = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.PROCESSING.value,
Application.overdue_days <= 95, Application.type == ApplicationType
.CASH_LOAN.value)
updated_rows_count_processing = query_processing.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_processing)
query_test = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.AB_TEST.value, Application.
overdue_days <= 95, Application.type == ApplicationType.CASH_LOAN.value
)
updated_rows_count_test = query_test.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_test)
calc_overdue_days_instalment()
apps = Application.select(Application.id).where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value, ApplicationStatus.AB_TEST.value], Application.overdue_days <=
95, Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
Application.update(C1A_entry=datetime.now()).where(Application.status <<
[ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value, ApplicationStatus.AB_TEST.value], Application.overdue_days == 4
).execute()
def calc_overdue_days_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
today_now_time = datetime.now()
month_first_day = today_now_time.replace(day=1, hour=1, minute=30,
second=0, microsecond=0)
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days <= 95)
updated_rows_count = query.execute()
logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %
(updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.
created_at, OverdueBill.collection_id, OverdueBill.overdue_days
).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.
collection_id == Application.id).where(Application.status ==
status, Application.type == ApplicationType.CASH_LOAN_STAGING.value
)
app_update = {}
for ob in overdue_bills:
if (ob.status == ApplicationStatus.REPAID.value and ob.
created_at < month_first_day):
continue
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)
def automatic_escalation(payload, msg_id):
app_ids = payload.get('application_list', [])
if not app_ids:
return
apps = Application.select().where(Application.id.in_(app_ids),
Application.status != ApplicationStatus.REPAID.value)
for a in apps:
new_cycle = application_entry_different_calculations(a)
if a.overdue_days < 90:
logging.info(
'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'
.format(a.id, new_cycle, a.cycle, a.overdue_days))
if new_cycle > a.cycle:
with db.atomic():
if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle
.C1B.value):
bomber_id = (a.latest_bomber_id if a.latest_bomber_id else
a.cycle)
DispatchAppHistory.update(out_at=datetime.now(),
out_overdue_days=a.overdue_days).where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == bomber_id).execute()
Escalation.create(application=a.id, type=EscalationType.
AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,
current_cycle=a.cycle, escalate_to=new_cycle,
current_bomber_id=a.latest_bomber)
dis_app_update = DispatchApp.update(status=DisAppStatus.
ABNORMAL.value).where(DispatchApp.application == a.id)
dis_app_update.execute()
a.cycle = new_cycle
a.last_bomber = a.latest_bomber
a.status = ApplicationStatus.UNCLAIMED.value
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
if new_cycle == Cycle.C1B.value:
a.C1B_entry = datetime.now()
elif new_cycle == Cycle.C2.value:
a.C2_entry = datetime.now()
elif new_cycle == Cycle.C3.value:
a.C3_entry = datetime.now()
a.save()
logging.info('automatic escalation done')
def application_entry_different_calculations(app):
    conf = {1: [1, 10], 2: [11, 30], 3: [31, 60], 4: [61, 90],
            5: [91, 999999]}
for new_cycle, scopes in conf.items():
if scopes[0] <= app.overdue_days <= scopes[1]:
return new_cycle
return app.cycle
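# Illustrative usage of the mapping above (an added example, not original
# code): 25 days overdue resolves to cycle 2, anything past 90 days resolves
# to cycle 5, and days outside every range fall back to the current cycle.
#
#     >>> from types import SimpleNamespace
#     >>> app = SimpleNamespace(overdue_days=25, cycle=1)
#     >>> application_entry_different_calculations(app)
#     2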
@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
"""已废弃"""
employees = Bomber.select(Bomber, Role).join(Role)
summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,
'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,
'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':
0, 'sms_sent': 0} for i in employees}
now_date = date.today()
cal_date = now_date - timedelta(days=1)
claimed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('claimed')).where(fn.DATE(Application.
claimed_at) == cal_date, Application.status << [ApplicationStatus.
PROCESSING.value, ApplicationStatus.REPAID.value], Application.
latest_bomber.is_null(False)).group_by(Application.latest_bomber)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False)).group_by(
Application.latest_bomber)
completed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('completed')).where(Application.
latest_bombing_time.is_null(False), fn.DATE(Application.
latest_bombing_time) == cal_date, Application.latest_bomber.is_null
(False)).group_by(Application.latest_bomber)
escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(
Escalation.id).alias('escalated')).where(fn.DATE(Escalation.
created_at) == cal_date, Escalation.type == EscalationType.
AUTOMATIC.value, Escalation.current_bomber.is_null(False),
Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation
.current_bomber)
transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).
alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==
cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(
Transfer.operator)
promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(
BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.
created_at) == cal_date, BombingHistory.result == BombingResult.
HAS_PROGRESS.value).group_by(BombingHistory.bomber)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,
RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.
current_bomber)
calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id
).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==
cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)
calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.
record_id).alias('calls_connected')).where(fn.DATE(CallLog.
time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==
'1').group_by(CallLog.user_id)
sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(
ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_
(ConnectType.sms()), ConnectHistory.created_at >= cal_date,
ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)
for i in claimed:
summary[i.latest_bomber_id]['claimed'] += i.claimed
for i in completed:
summary[i.latest_bomber_id]['completed'] += i.completed
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
for i in escalated:
summary[i.current_bomber_id]['escalated'] += i.escalated
for i in transferred:
summary[i.operator_id]['transferred'] += i.transferred
for i in promised:
summary[i.bomber_id]['promised'] += i.promised
    for i in amount_recovered:
        recovered = i.principal_part + i.late_fee_part
        summary[i.current_bomber_id]['amount_recovered'] += recovered
for i in calls_made:
summary[int(i.user_id)]['calls_made'] += i.calls_made
for i in calls_connected:
summary[int(i.user_id)]['calls_connected'] += i.calls_connected
for i in sms_sent:
summary[i.operator_id]['sms_sent'] += i.sms_sent
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'claimed': data['claimed'], 'completed': data['completed'],
'cleared': data['cleared'], 'escalated': data['escalated'],
'transferred': data['transferred'], 'promised': data['promised'
], 'amount_recovered': data['amount_recovered'], 'calls_made':
data['calls_made'], 'calls_connected': data['calls_connected'],
'sms_sent': data['sms_sent'], 'date': cal_date})
if insert_args:
Summary.insert_many(insert_args).execute()
cycle_args = []
escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(
Escalation.id).alias('escalated_in')).where(Escalation.status ==
ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==
cal_date).group_by(Escalation.escalate_to)
for i in escalated_in:
cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.
escalated_in, 'date': cal_date})
amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM
(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)
    for i in amount_recovered_total:
        recovered_total = i.principal_part + i.late_fee_part
        cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':
            recovered_total, 'date': cal_date})
if cycle_args:
Summary.insert_many(cycle_args).execute()
logging.info('cal summary done')
send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})
@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
"""已废弃,定时任务还在执行,具体情况待确定"""
cal_date = date.today() - timedelta(days=1)
employees = Bomber.select(Bomber, Role).join(Role)
auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,
AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')
).where(fn.DATE(AutoCallActions.created_at) == cal_date)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.
is_null(False), RepaymentLog.is_bombed == True)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False))
auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,
AutoCallActions.result)
amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)
cleared = cleared.group_by(Application.latest_bomber)
summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,
'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered':
0} for e in employees}
for a in auto_call_actions:
summary[a.bomber_id]['answered_calls'] += a.count
if a.result == AutoCallResult.PTP.value:
summary[a.bomber_id]['ptp'] += a.count
if a.result == AutoCallResult.FOLLOW_UP.value:
summary[a.bomber_id]['follow_up'] += a.count
if a.result == AutoCallResult.NOT_USEFUL.value:
summary[a.bomber_id]['not_useful'] += a.count
    for i in amount_recovered:
        recovered = i.principal_part + i.late_fee_part
        summary[i.current_bomber_id]['amount_recovered'] += recovered
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'answered_calls': data['answered_calls'], 'ptp': data['ptp'],
'follow_up': data['follow_up'], 'not_useful': data['not_useful'
], 'cleared': data['cleared'], 'amount_recovered': str(data[
'amount_recovered']), 'date': cal_date})
if insert_args:
Summary2.insert_many(insert_args).execute()
@action(MessageAction.BOMBER_SYNC_CONTACTS)
def sync_suggested_contacts(payload, msg_id):
""" suggested contacts sync """
applications = Application.select(Application.id, Application.user_id
).where(Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value])
logging.debug('start sync contact')
for a in applications:
sync_contacts(a)
logging.info('contact sync finished')
@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
app_id = payload['id']
msg_type = payload['msg_type']
discount_to = payload['discount_to']
effective_to = payload['effective_to']
application = Application.filter(Application.id == app_id).first()
if not application:
logging.error(
'discount approved msg send failed application %s not found',
app_id)
return
template = Template.select(Template.text, Template.app).where(Template.
type == ConnectType.AUTO_SMS.value, Template.id << Template.
get_auto_sms_tpl(msg_type), Template.app == application.app).first()
if not template:
        logging.error(
            'discount approved msg send failed template %s not found',
            msg_type)
return
promised_date = None
if application.promised_date:
promised_date = application.promised_date.strftime('%d-%m-%Y')
tpl_data = {'user_name': application.user_name, 'due_days': application
.overdue_days, 'app_name': application.app, 'phone': application.
user_mobile_no, 'cs_number': cs_number_conf.get(application.app,
'02150202889'), 'promised_date': promised_date, 'discount_to':
discount_to, 'effective_to': effective_to}
content = template.text.format(**tpl_data)
data_list = [{'receiver': '62' + application.user_mobile_no, 'content':
content, 'title': ''}]
send_sms(data_list, msg_type, application.app)
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):
with db.atomic():
bomber_dispatch_app()
dispatch_instalment_app()
dis_apps = DispatchApp.select(DispatchApp.application).where(
DispatchApp.status == DisAppStatus.NORMAL.value)
c1_apps = Application.select(Application.id, Application.cycle,
Application.follow_up_date, Application.called_times).where(Application
.status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.
AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.
is_rejected == False, Application.promised_date.is_null(True) | (fn
.DATE(Application.promised_date) < datetime.today().date())).order_by(
Application.overdue_days, Application.apply_at)
dis_apps_ids = [da.application_id for da in dis_apps]
insert_args = []
for a in c1_apps:
if a.id in dis_apps_ids:
continue
insert_args.append({'application': a.id, 'cycle': a.cycle,
'follow_up_date': a.follow_up_date, 'called_times': 1 if a.
called_times else 0, 'description': 'init'})
if not insert_args:
        logging.error('no application needs auto call')
with db.atomic():
AutoCallList.delete().execute()
for idx in range(0, len(insert_args), 100):
AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()
for idx in range(0, len(insert_args), 100):
application_list = [i['application'] for i in insert_args[idx:idx +
100]]
send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {
'application_list': application_list})
logging.info('bomber generate auto call list finished')
send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})
class ChangeBomberTool(object):
@staticmethod
def in_record(bomber_id, ids, bd):
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.
alias('application_id'), R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(
'expected_out_time'), Application.overdue_days.alias(
'entry_overdue_days')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << ids)
Application.update(latest_bomber=bomber_id).where(Application.id.
in_(ids)).execute()
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bd),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@staticmethod
def out_record(a, bd):
_id = str(a.id)
DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a
.overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]
.get('principal_paid')), out_late_fee_pending=bd[_id].get(
'late_fee') - bd[_id].get('late_fee_paid')).where(
DispatchAppHistory.application == a.id, DispatchAppHistory.
bomber_id == a.latest_bomber_id).execute()
a.last_bomber = a.latest_bomber
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
a.save()
@staticmethod
def classify(l, b):
if len(l) == 1:
return l[0]
_l = filter(lambda x: x['bomber'] != b, l)
return min(_l, key=lambda x: len(x['ids']))
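    # Illustrative usage of `classify` (an added example, not original code):
    # buckets owned by bomber `b` are skipped and the least-loaded remaining
    # bucket wins.
    #
    #     >>> l = [{'bomber': 1, 'ids': [10, 11]}, {'bomber': 2, 'ids': [12]}]
    #     >>> ChangeBomberTool.classify(l, 1)
    #     {'bomber': 2, 'ids': [12]}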
def bomber_dispatch_app():
try:
c1a_dispatch_app()
except Exception as e:
logging.error('c1a_dispatch_app error:%s' % str(e))
    cycle = {1: 10, 2: 30, 3: 60, 4: 90}
apps = Application.select().where(fn.DATE(Application.C2_entry) == date
.today(), Application.type == ApplicationType.CASH_LOAN.value)
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C2.value)
apps_ids = [a.id for a in apps]
dispatch_inserts = []
start_index = 0
apps_length = len(apps_ids)
logging.warning('apps length %s' % str(apps_length))
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)
gen = CycleIter([b.id for b in bombers])
existing_list = []
end_index = start_index + int(apps_length * p.app_percentage)
        logging.info('partner slice end index %s' % str(end_index))
if not apps_ids[start_index:end_index]:
continue
bills = BillService().bill_list(application_ids=apps_ids[
start_index:end_index])
bill_dict = {bill['application_id']: bill for bill in bills}
for a_id in apps_ids[start_index:end_index]:
bomber = average_gen(gen, existing_list)
q = DispatchApp.delete().where(DispatchApp.application == a_id
).execute()
dispatch_inserts.append({'application': a_id, 'bomber': bomber,
'partner': p.id})
application = Application.select().where(Application.id == a_id
).first()
application.latest_bomber = bomber
application.status = ApplicationStatus.AB_TEST.value
application.ptp_bomber = None
application.save()
day_next_cycle = cycle.get(application.cycle
) - application.overdue_days
DispatchAppHistory.create(application=a_id, partner_id=p.id,
bomber_id=bomber, entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - Decimal(
bill_dict[a_id].get('principal_paid')),
entry_late_fee_pending=Decimal(bill_dict[a_id].get(
'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')
), expected_out_time=date.today() + timedelta(days=
day_next_cycle))
start_index = end_index
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
config = SystemConfig.prefetch(SCI.AB_TEST_C2)
c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)
c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)
c2 = apps_ids[start_index:]
if c2:
bills = BillService().bill_list(application_ids=c2)
else:
bills = []
bill_dict = {bill['application_id']: bill for bill in bills}
    logging.info('c2 AB_test count: %s' % str(len(c2)))
gen = CycleIter(c2_bomber)
existing_list = []
for c in c2:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == c).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = 46 - application.overdue_days
DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at
=datetime.now(), entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - bill_dict[c].get(
'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(
'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),
expected_out_time=date.today() + timedelta(days=day_next_cycle))
ab_test_other()
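# `CycleIter`, `average_gen` and `get_average_number`, used by the dispatch
# helpers in this section, are defined elsewhere in the project. Minimal
# sketches of the assumed behaviour (assumptions, not the original code):
class CycleIter:
    """Endless round-robin iterator over a list of bomber ids."""

    def __init__(self, items):
        self.items = list(items)
        self.index = 0

    def __next__(self):
        item = self.items[self.index % len(self.items)]
        self.index += 1
        return item


def average_gen(gen, existing_list):
    # Draw the next bomber from the round-robin and record the assignment.
    bomber = next(gen)
    existing_list.append(bomber)
    return bomber


def get_average_number(num, bomber_num):
    # Split `num` applications into `bomber_num` nearly equal shares,
    # e.g. (10, 3) -> [4, 3, 3].
    average, remainder = divmod(num, bomber_num)
    return [average + 1 if i < remainder else average
            for i in range(bomber_num)]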
def c1a_dispatch_app():
today = datetime.today().date()
tomorrow = today + timedelta(days=1)
c1a_apps = Application.select().where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value], Application.dpd1_entry >= today, Application.dpd1_entry <
tomorrow, Application.type == ApplicationType.CASH_LOAN.value)
all_aids = [a.id for a in c1a_apps]
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C1A.value)
end = 0
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
is_del == 0)
start = end
end += int(len(all_aids) * p.app_percentage)
aids = all_aids[start:end]
bids = [b.id for b in bombers]
if not bids or not aids:
continue
average_number = get_average_number(len(aids), len(bids))
p_end = 0
for i, bid in enumerate(bids):
p_start = p_end
p_end += average_number[i]
b_aids = aids[p_start:p_end]
with db.atomic():
q = Application.update(latest_bomber=bid, status=
ApplicationStatus.AB_TEST.value).where(Application.id <<
b_aids).execute()
params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,
'application_ids': b_aids, 'dest_bomber_id': bid}
new_in_record(**params)
try:
dispatch_inserts = []
for aid in b_aids:
dispatch_inserts.append({'application': aid, 'bomber':
bid, 'partner': p.id, 'status': DisAppStatus.NORMAL
.value})
if dispatch_inserts:
q = DispatchApp.insert_many(dispatch_inserts).execute()
except Exception as e:
                logging.error(
                    'c1a dispatch insert into dispatch_app error: %s' % str(e))
def get_cash_bomber(bids, cycle):
cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==
0, Bomber.instalment != cycle)
cash_bids = [b.id for b in cash_bombers]
return cash_bids
def out_and_in_record_instalment(**kwargs):
if not kwargs.get('application_ids'):
return
out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(
DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True)).execute()
    cycle_period = {1: '10', 2: '30', 3: '60', 4: '90'}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(kwargs['dest_bomber_id'])).alias(
'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.
alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).
alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << kwargs[
'application_ids'])
application_list = list(subquery)
for idx in range(0, len(application_list), 50):
applications = application_list[idx:idx + 50]
app_ids = [i.application_id for i in applications]
overdue_bills = OverdueBill.select().where(OverdueBill.
collection_id << app_ids)
sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
insert_args = lambad_instalment_result(bill_list, applications)
if not insert_args:
continue
DispatchAppHistory.insert_many(insert_args).execute()
def c1b_dispatch_in_record(**kwargs):
app_ids = kwargs.get('apps')
partner_id = kwargs.get('partner_id', 'null')
bill_dict = kwargs.get('bill_dict')
period = kwargs.get('period')
bomber_id = kwargs.get('bomber_id')
if not all([app_ids, partner_id, bill_dict, period]):
return False
bill_dict = {str(k): v for k, v in bill_dict.items()}
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().
alias('entry_at'), Application.overdue_days.alias(
'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.id << app_ids)
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@action(MessageAction.BOMBER_AUTO_CALL_CONTACT)
def bomber_auto_call_contact(payload, msg_id):
application_list = payload['application_list']
applications = []
for app_id in application_list:
applications.append(Application.filter(Application.id == app_id).
first())
with db.atomic():
for application in applications:
cycle = application.cycle
contacts = Contact.select().where(Contact.user_id ==
application.user_id, Contact.latest_status.not_in(
ContactStatus.no_use())).order_by(-Contact.useful, Contact.
relationship, -Contact.total_duration, -Contact.total_count)
level1 = []
level2 = []
level3 = []
level = []
for c in contacts:
if c.relationship == Relationship.APPLICANT.value:
level.append(c)
elif c.relationship == Relationship.FAMILY.value:
level1.append(c)
elif c.relationship == Relationship.COMPANY.value:
level2.append(c)
elif c.relationship == Relationship.SUGGESTED.value:
level3.append(c)
contacts = level + level2 + level1 + level3
numbers = []
fc_count = 0
app_calls = []
need_verify = False
for eac_contact in contacts:
if (eac_contact.relationship == Relationship.FAMILY.value and
eac_contact.useful == ContactsUseful.NONE.value):
need_verify = True
break
if need_verify:
                logging.info('Found contact needing update. app id {}'.format(
                    str(application.id)))
app_calls = AuditService().phone_invalid(cat=Relationship(1
).name, application_id=application.external_id)
call_history = True
c1b_family_dict = defaultdict(list)
for c in contacts:
if c.relationship == Relationship.COMPANY.value:
if cycle == Cycle.C1A.value:
call_history = check_call_history(application)
break
if cycle == Cycle.C1B.value:
if (c.source != CompanyContactType.
BASIC_INFO_JOB_TEL.value):
continue
if c.relationship == Relationship.FAMILY.value:
if cycle == Cycle.C1A.value:
call_history = check_call_history(application)
break
if c.useful == ContactsUseful.NONE.value:
c.useful = check_valid_phone(app_calls, c)
c.save()
if c.useful == ContactsUseful.INVALID.value:
logging.info('Found invalid contact. {}'.format(str
(c.id)))
continue
if cycle == Cycle.C1B.value:
c1b_family_dict[c.source].append(c.number)
continue
if c.relationship == Relationship.SUGGESTED.value:
if cycle not in (Cycle.C2.value, Cycle.C3.value):
break
if cycle == Cycle.C2.value and fc_count > 10:
break
if cycle == Cycle.C3.value and fc_count > 20:
break
fc_count += 1
numbers.append(c.number)
if len(numbers) == 0 or not call_history:
                src_contact = Contact.select().where(Contact.user_id ==
                    application.user_id, Contact.source <<
                    FamilyContactType.c1a_order())
c1a_family_dict = defaultdict(list)
for e in src_contact:
c1a_family_dict[e.source].append(e.number)
for call_type in FamilyContactType.c1a_order():
numbers.extend(c1a_family_dict[call_type])
if cycle == Cycle.C1B.value:
for call_type in FamilyContactType.c1b_order():
numbers.extend(c1b_family_dict[call_type])
numbers = list(set(numbers))
update_query = AutoCallList.update(numbers=','.join(numbers)
).where(AutoCallList.application == application.id)
update_query.execute()
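# Note on the ordering assembled above: applicant numbers come first, then
# company contacts, then family, then suggested contacts (company is placed
# ahead of family), with per-cycle filtering applied before the final number
# list is written back to AutoCallList.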
def check_call_history(application):
app_create_at = application.created_at + timedelta(days=4)
if datetime.today().date() > app_create_at.date():
call_actions = CallActions.select().where(CallActions.type == 0,
CallActions.application == application.id, CallActions.
created_at > datetime.now() - timedelta(days=5))
for call in call_actions:
if call.phone_status == PhoneStatus.CONNECTED.value:
return True
return False
return True
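# Reading of the guard above: for the first four days after an application is
# created, check_call_history always returns True; after that it requires at
# least one call with phone_status CONNECTED within the last five days.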
@action(MessageAction.BOMBER_SCAVENGER)
def scavenger(payload, msg_id):
scavenger_time = -60
    scavenger_config = SystemConfig.select().where(SystemConfig.key ==
        'SCAVENGER_TIME').first()
    if scavenger_config and scavenger_config.value.isdigit():
        scavenger_time = -int(scavenger_config.value)
update_auto_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value, description='scavenger').where(AutoCallList.status ==
AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime
.now() + timedelta(minutes=scavenger_time))
count = update_auto_call_list.execute()
    logging.info('scavenger processed %s applications', count)
mail_box_scavenger_time = -30
mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==
'MAIL_BOX_SCAVENGER_TIME').first()
if mail_box_scavenger and mail_box_scavenger.value.isdigit():
mail_box_scavenger_time = -int(mail_box_scavenger.value)
update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.
value, AutoCallList.updated_at < datetime.now() + timedelta(minutes
=mail_box_scavenger_time))
mail_box_count = update_mail_box_call_list.execute()
logging.info('scavenger update mail box %s', mail_box_count)
update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value
).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.
updated_at < datetime.now() + timedelta(minutes=-30))
ivr_result = update_auto_ivr.execute()
logging.info('scavenger update %s ivr' % ivr_result)
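# `OperatedDict`, used heavily below, is defined elsewhere in the project. A
# minimal sketch of the assumed behaviour -- it wraps (cycle, count) rows from
# fetchall() and supports elementwise arithmetic between two dicts:
class OperatedDict(dict):
    def __init__(self, rows=None):
        super().__init__({k: v for k, v in (rows or [])})

    def _combine(self, other, op):
        keys = set(self) | set(other)
        return OperatedDict(
            [(k, op(self.get(k, 0), other.get(k, 0))) for k in keys])

    def __add__(self, other):
        return self._combine(other, lambda a, b: a + b)

    def __sub__(self, other):
        return self._combine(other, lambda a, b: a - b)

    def __truediv__(self, other):
        # Ratios default to 0 where the denominator is missing or zero.
        return self._combine(other, lambda a, b: a / b if b else 0)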
@action(MessageAction.REPORT_BOMBER_COLLECTION)
def report_bomber_collection(payload, msg_id):
start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)
).scalar()
now = datetime.now()
if start_date and str(start_date) == str(now)[:10]:
return
end_date = str(now + timedelta(days=1))[:10]
start_date = str(now)[:10]
dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))
all_overdue_loan_sql1 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
GROUP BY 1;
"""
s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()
d1 = OperatedDict(s_data1)
all_overdue_loan_sql2 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE DATE(ba.follow_up_date) > CURDATE()
AND ba.called_counts = 0
GROUP BY 1;
"""
s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()
d2 = OperatedDict(s_data2)
overdue_loans_entered_into_predict_call_system_sql = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE ba.called_counts >= 1
GROUP BY 1;
"""
s_data3 = readonly_db.execute_sql(
overdue_loans_entered_into_predict_call_system_sql).fetchall()
d3 = OperatedDict(s_data3)
loans_completed_sql = """
SELECT ba.cycle, COUNT(DISTINCT ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()
d4 = OperatedDict(s_data4)
connected_calls_automatic_sql = """
SELECT ba.cycle, COUNT(ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()
d5 = OperatedDict(s_data5)
connected_calls_manual_sql = """
SELECT bb.cycle, COUNT(bb.id)
FROM bomber.bombing_history bb
WHERE DATE(bb.created_at) = curdate()
AND (bb.bomber_id < 150 OR bb.bomber_id > 200)
GROUP BY bb.cycle;
"""
s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()
d6 = OperatedDict(s_data6)
    logging.info('Fetched data directly from the database.')
c1 = d1 - d2
c2 = d3
c3 = c2 / c1
c4 = d4
c5 = c4 / c2
c6 = d5
c7 = c6 / c4
c8 = d6
c9 = OperatedDict(get_agent())
c10 = (c6 + c8) / c9
try:
c11 = average_call_duration_team(start_date, end_date)
except AttributeError:
c11 = {}
lst = []
for i in range(1, 5):
lst.append({'apply_date': start_date, 'cycle': dct[i],
'all_overdue_loan': c1.get(i, 0),
'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),
'of_overdue_loans_entered_into_predict_call_system': round(c3.
get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),
'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *
100, 1), 'connected_calls_automatic': c6.get(i, 0),
'connected_calls_automatic_completed_loans': round(c7.get(i, 0),
1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i,
0), 'average_calls_agent': round(c10.get(i, 0), 1),
'average_call_duration_team': round(c11.get(i, 0), 1)})
ReportCollection.insert_many(lst).execute()
logging.info('report_bomber_collection:Done!')
@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)
def bomber_auto_call_list_record(payload, msg_id):
"""记录一年的auto_call_list,删除前一天的数据,增加今天的数据"""
now = datetime.now()
if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):
date_sql = """
SELECT DATE(created_at) FROM auto_call_list_record
GROUP BY DATE(created_at) limit 1
"""
del_date = db.execute_sql(date_sql).fetchone()[0]
del_sql = """
DELETE FROM auto_call_list_record WHERE date(created_at) = %s
"""
db.execute_sql(del_sql, [del_date])
sql = """
INSERT INTO auto_call_list_record
SELECT * FROM auto_call_list
"""
db.execute_sql(sql)
logging.info('bomber_auto_call_list_record done')
def new_out_record(**kwargs):
if not kwargs['application_ids']:
return
DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.
bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.
application << kwargs['application_ids'], DispatchAppHistory.out_at
.is_null(True)).execute()
if kwargs.get('month_dispatch'):
return
try:
Application.update(ptp_bomber=None).where(Application.id << kwargs[
'application_ids']).execute()
except Exception as e:
logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[
'application_ids'], str(e)))
def end_old_application(old_app, paid=False):
if paid:
if old_app.status == OldLoanStatus.WAITING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return old_app.application_id
end_date = old_app.end_date
now = datetime.now()
if now >= max(end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
old_app.save()
return old_app.application_id
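# State transitions implemented above, in summary: WAITING -> PAID on
# repayment; PROCESSING -> PAID on repayment (returning the application id so
# the caller can recall the case); otherwise the record becomes FINISHED once
# both the end date and any promised date have passed.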
def in_record(**kwargs):
"""
:param kwargs: dist_partner_id, dist_bomber_id,
expected_out_time, application_ids
:return:
"""
kwargs['dist_partner_id'] = kwargs.get('dist_partner_id') or 'null'
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(kwargs['dist_bomber_id'])).alias(
'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.
alias('entry_overdue_days'), R(str(kwargs['dist_partner_id'])).
alias('partner_id'), R('"{}"'.format(kwargs['expected_out_time'])).
alias('expected_out_time')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << kwargs[
'application_ids'])
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
app_ids = [i.application_id for i in applications]
bill_list = BillService().bill_list(application_ids=app_ids)
bill_dict = {str(bill['application_id']): bill for bill in bill_list}
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
def start_old_application(old_app, cancel=False):
application_id = old_app.application_id
if cancel and old_app.status == OldLoanStatus.PAID.value:
now = datetime.now()
if old_app.start_date is None:
old_app.status = OldLoanStatus.WAITING.value
elif now >= max(old_app.end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.
promised_date or now)).where(DispatchAppHistory.bomber_id ==
old_app.bomber_id, DispatchAppHistory.application ==
application_id).execute()
else:
old_app.status = OldLoanStatus.PROCESSING.value
DispatchAppHistory.update(out_at=None).where(DispatchAppHistory
.bomber_id == old_app.bomber_id, DispatchAppHistory.
application == application_id).execute()
old_app.save()
return
application = Application.get_or_none(Application.id == application_id,
Application.status != ApplicationStatus.REPAID.value, Application.
overdue_days > 90, Application.promised_date.is_null(True) | (fn.
DATE(Application.promised_date) < datetime.today().date()))
if not application:
logging.error('Can not set old application %s to start collecting',
application_id)
return
if old_app.status in OldLoanStatus.no_available():
logging.info('%s has finished or paid', old_app.application_id)
return
config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)
sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)
old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value
old_app.status = OldLoanStatus.PROCESSING.value
old_app.bomber_id = old_app_bomber
old_app.start_date = datetime.now()
if not old_app.end_date:
old_app.end_date = datetime.now() + timedelta(days=sp)
old_app.save()
in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,
application_ids=[old_app.application_id], expected_out_time=str(
old_app.end_date))
def run_member_sql(sql):
result = [0, 0]
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
sql_result = cursor.fetchone()
if sql_result:
result = sql_result
except Exception as e:
logging.info('run sql error: %s' % str(sql))
return result
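# `run_one_sql` and `run_all_sql`, used throughout the report helpers below,
# are defined elsewhere in the project. Minimal sketches consistent with
# `run_member_sql` above (assumptions, not the original implementations):
def run_one_sql(sql):
    # Scalar query: first column of the first row, defaulting to 0.
    result = Decimal(0)
    try:
        cursor = readonly_db.get_cursor()
        cursor.execute(sql)
        row = cursor.fetchone()
        if row and row[0] is not None:
            result = row[0]
    except Exception:
        logging.info('run sql error: %s' % str(sql))
    return result


def run_all_sql(sql):
    # Multi-row query: returns all rows, defaulting to an empty list.
    result = []
    try:
        cursor = readonly_db.get_cursor()
        cursor.execute(sql)
        result = cursor.fetchall()
    except Exception:
        logging.info('run sql error: %s' % str(sql))
    return result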
def get_before_bomber(date_time):
begin_time = str(date_time - timedelta(days=7))
end_time = str(date_time)
old_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at<date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (begin_time, begin_time))
old_data = run_one_sql(old_sql)
new_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at> '%s'
and created_at<'%s'
and overdue_days=1;
"""
% (begin_time, end_time))
new_data = run_one_sql(new_sql)
dpd4_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>date_add('%s',interval 1 day)
and created_at< date_add('%s',interval 1 day)
and overdue_days=4;
"""
% (begin_time, end_time))
dpd4_data = run_one_sql(dpd4_sql)
dpd2_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at< date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (end_time, end_time))
dpd2_data = run_one_sql(dpd2_sql)
all_money = old_data + new_data
repayment = all_money - dpd4_data - dpd2_data
pro = 0
if all_money:
pro = repayment / all_money * 100
RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,
proportion=pro, repayment=repayment)
def get_c1a_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=19)
repayment_sql = (
"""
select
sum(b.principal_part+b.late_fee_part) as paid_amount,
cdt
from
(select
br.principal_part, br.late_fee_part,
date(cdt) as cdt, br.repay_at, br.application_id
from (
select ba.id, ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
if not repayment:
return
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
for d in repayment:
repay = d[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
d[1], RepaymentReportInto.cycle == Cycle.C1A.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = round(repay, 3)
pro = repay / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
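# Note on units (an inference from the arithmetic above, not documented):
# repayment_log amounts are divided by 1,000,000 before being compared with
# the pending amounts from bill_java.overdue, implying the two tables store
# values at different scales. The same scaling appears in the C2 and C3
# report helpers below.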
def get_c2_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
not_contain_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=37)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
not_contain_repay = run_all_sql(not_contain_repay_sql)
if not not_contain_money and repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(
not_contain_money, 3), proportion='0', repayment=0,
is_first_loan=is_first_loan, contain_out=ContainOut.
NOT_CONTAIN.value)
for repay in not_contain_repay:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(all_money, 3),
proportion='0', repayment=0, is_first_loan=
is_first_loan, contain_out=ContainOut.CONTAIN.value)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
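# get_c2_into_rate reports the C2 cycle twice per day: once over all C2
# entries (CONTAIN) and once excluding applications ever dispatched to
# partner 1 (NOT_CONTAIN), via the `not exists` subqueries above.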
def get_c3_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id, ba.c3_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=30)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c3_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c3_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = '%s'
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 4
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C3.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
if not repayment:
return
for repay in repayment:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
repay[1], RepaymentReportInto.cycle == Cycle.C3.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = repay_money
pro = 0
if report.all_money and int(report.all_money):
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_static_bomber(begin_date):
active_date = begin_date - timedelta(days=8)
bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),
BomberR.last_active_at.alias('active')).where(BomberR.
last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])
summary = []
for bomber in bombers:
summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':
bomber.role.cycle, 'work_ind': 0})
SummaryBomber.insert_many(summary).execute()
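# `time_logger`, used as a decorator below, is defined elsewhere in the
# project. A minimal sketch of the assumed behaviour (log the wrapped
# function's elapsed wall-clock time):
def time_logger(func):
    import time as _time
    from functools import wraps

    @wraps(func)
    def wrapper(*args, **kwargs):
        start = _time.time()
        result = func(*args, **kwargs)
        logging.info('%s finished in %.2fs', func.__name__,
                     _time.time() - start)
        return result
    return wrapper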
@time_logger
def get_call_and_made(end_date, begin_date, real_time_query=False):
call_sql = (
"""
select
bomber_id,
count(case when relationship is not null then application_id end)
as 'call_cnt',
count(distinct case when relationship is not null then
application_id end) as 'call_case',
count(case when phone_status=4 then application_id end) as 'connect',
count(distinct case when phone_status=4 then application_id end)
as 'connect_case'
from (
select bomber_id,application_id,phone_status, cycle, relationship
from bomber.call_actions ba
where created_at>'%s' and created_at<'%s'
and type in (0, 1)
) a
group by 1
"""
% (begin_date, end_date))
calls = run_all_sql(call_sql)
if real_time_query:
return calls
for call in calls:
bomber, call_cnt, case_made, connect_cnt, case_connect = call
SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,
call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(
SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date
).execute()
return calls
@time_logger
def get_claimed_cnt(end_date, begin_date, real_time_query=False):
table_date = begin_date - timedelta(days=30)
claimed_sql = (
"""
SELECT
COUNT( `t1`.`application_id` ) AS cnt,
`t1`.`bomber_id` AS bomber_id
FROM
`dispatch_app_history` AS t1
WHERE
( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null )
AND ( `t1`.`bomber_id` != 1000 )
AND ( `t1`.`partner_id` IS null )
AND ( `t1`.`entry_at` > '%s' )
AND ( `t1`.`entry_at` < '%s' )
GROUP BY
`t1`.`bomber_id`
"""
% (begin_date, table_date, end_date))
claimeds = run_all_sql(claimed_sql)
if real_time_query:
return claimeds
for claimed in claimeds:
cnt, bomber_id = claimed
SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == bomber_id).execute()
return claimeds
def get_sms_data(end_data, begin_data):
all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(
'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')
).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.
created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())
).group_by(ConnectHistoryR.operator)
for sms in all_sms:
SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==
begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()
return all_sms
@action(MessageAction.SUMMARY_NEW)
def summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_active_bomber(begin_date)
get_call_and_made(end_date, begin_date)
get_claimed_cnt(end_date, begin_date)
get_sms_data(end_date, begin_date)
get_ptp_data(end_date, begin_date)
get_recover_amount(end_date, begin_date)
get_unfollowed(begin_date)
get_unfollowed_call(begin_date)
def get_new_case_amount(begin_date, end_date):
all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.
entry_late_fee_pending + DispatchAppHistoryR.
entry_principal_pending).alias('pending'), DispatchAppHistoryR.
bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')
).where(DispatchAppHistoryR.entry_at > begin_date,
DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.
partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)
for case in all_case:
SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt
=case.cnt).where(SummaryBomber.bomber_id == case.bomber_id,
SummaryBomber.time == begin_date).execute()
return all_case
def get_kp_today(begin_date, end_date):
sql = (
"""
select bomber_id, count(distinct application_id)
from(
SELECT bomber_id, application_id
FROM bomber.auto_call_actions a
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where a.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))
UNION
SELECT bomber_id, application_id
FROM bomber.bombing_history b
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where b.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))) result
GROUP BY 1
"""
% (begin_date, end_date, begin_date, begin_date, end_date, begin_date)
)
kp_today = run_all_sql(sql)
for kp in kp_today:
SummaryBomber.update(KP_today_cnt=kp[1]).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == kp[0]).execute()
def get_new_case_cleared(begin_date, end_date):
sql = (
"""
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c2_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c3_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
) a
GROUP BY 1
"""
% (begin_date, end_date, begin_date, end_date, begin_date, end_date))
case_cleared_sums = run_all_sql(sql)
for clear in case_cleared_sums:
SummaryBomber.update(new_case_cleared_sum=clear[1]).where(
SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==
clear[0]).execute()
@time_logger
def get_new_case_call(begin_date, end_date, real_query_time=False):
sql = (
"""
SELECT
bd.bomber_id,
count( DISTINCT bd.application_id )
FROM
bomber.dispatch_app_history bd
INNER JOIN bomber.call_actions bc
ON bd.application_id = bc.application_id
AND bd.bomber_id = bc.bomber_id
AND date( bd.entry_at ) = date( bc.created_at )
WHERE
entry_at > '%s'
AND entry_at < '%s'
AND partner_id IS NULL
GROUP BY 1
"""
% (begin_date, end_date))
new_case_calls = run_all_sql(sql)
if real_query_time:
return new_case_calls
for call in new_case_calls:
SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber
.bomber_id == call[0], SummaryBomber.time == begin_date).execute()
return new_case_calls
@time_logger
def get_calltime_avg(begin_date, end_date, real_query_time=False):
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction,
count( 1 ) AS auto_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
AND an.recording is not null
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]][0] += data[1]
result[data[0]][1] += data[2]
continue
result[data[0]] = [data[1], data[2]]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=
value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == key).execute()
return result
def get_no_calltime_avg(begin_date, end_date):
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND (an.status!='ANSWERED' or an.recording is null)
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
for data in manuals:
SummaryBomber.update(calltime_no_case_sum=data[1],
calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /
data[2] if data[2] else 0).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == data[0]).execute()
@time_logger
def get_calltime_sum(begin_date, end_date, real_query_time=False):
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == key).execute()
return result
def get_unfollowed_call(begin_date):
sql = (
"""
SELECT
bomber_id,
count(1)
FROM
(
SELECT
bd.application_id,
date(bd.entry_at) AS entry_at,
bd.bomber_id,
date(bd.out_at) AS out_at
FROM
bomber.dispatch_app_history bd
WHERE
(
out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)
OR out_at IS NULL
)
AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)
AND partner_id IS NULL
AND NOT EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
bd.bomber_id = bc.bomber_id
AND bc.application_id = bd.application_id
AND bc.created_at < '%(begin_date)s'
)
) a
WHERE
EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
a.application_id = bc.application_id
AND a.bomber_id = bc.bomber_id
AND bc.created_at > '%(begin_date)s'
AND bc.created_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND bc.created_at >= a.entry_at
)
OR EXISTS (
SELECT
1
FROM
bomber.application ba
WHERE
ba.id = a.application_id
AND ba.finished_at > '%(begin_date)s'
AND ba.finished_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
)
GROUP BY
1
"""
% {'begin_date': begin_date})
data = run_all_sql(sql)
result = defaultdict(int)
for d in data:
result[d[0]] += d[1]
bomber_list = []
for key, value in result.items():
bomber_list.append(key)
SummaryBomber.update(unfollowed_call_cnt=SummaryBomber.
new_case_call_cnt + value).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == key).execute()
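    # bombers with no unfollowed rows fall back to new_case_call_cnt alone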
update_sql = SummaryBomber.update(unfollowed_call_cnt=SummaryBomber.
new_case_call_cnt).where(SummaryBomber.time == begin_date)
if bomber_list:
update_sql = update_sql.where(SummaryBomber.bomber_id.not_in(
bomber_list))
update_sql.execute()
return result
@action(MessageAction.UPDATE_SUMMARY_NEW)
def update_summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'UPDATE_SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_new_case_amount(begin_date, end_date)
get_kp_cleared(begin_date, end_date)
get_kp_today(begin_date, end_date)
get_ptp_cnt(begin_date, end_date)
get_ptp_call_cnt(begin_date, end_date)
get_new_case_cleared(begin_date, end_date)
get_new_case_call(begin_date, end_date)
get_calltime_avg(begin_date, end_date)
get_no_calltime_avg(begin_date, end_date)
get_calltime_sum(begin_date, end_date)
def get_cycle_claimed(begin_date, end_date):
sql = (
"""
select cycle,count(1)
from bomber.application where cycle in (1,2,3,4)
and (finished_at is null or (finished_at>'%s'))
and created_at>'2018-09-01'
group by 1
"""
% begin_date)
result = run_all_sql(sql)
return result
@time_logger
def cycle_new_case(begin_date, end_date, real_time_query=False):
sql = (
"""
SELECT
1 AS cycle,
count( ba.id ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.created_at ) = bo.which_day_overdue
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c1b_entry ) = bo.which_day_overdue
WHERE
c1b_entry > '%s'
AND c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c2_entry ) = bo.which_day_overdue
WHERE
c2_entry > '%s'
AND c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c3_entry ) = bo.which_day_overdue
WHERE
c3_entry > '%s'
AND c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
all_datas = run_all_sql(sql)
if real_time_query:
return all_datas
for data in all_datas:
SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()
return all_datas
@time_logger
def get_cycle_new_case_call(begin_date, end_date, real_time_query=False):
sql = (
"""
SELECT
1 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.created_at ) = date( bc.created_at )
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c1b_entry ) = date( bc.created_at )
WHERE
ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c2_entry ) = date( bc.created_at )
WHERE
ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c3_entry ) = date( bc.created_at )
WHERE
ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
cycle_datas = run_all_sql(sql)
if real_time_query:
return cycle_datas
for data in cycle_datas:
SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber
.time == begin_date, SummaryBomber.cycle == data[0],
SummaryBomber.bomber_id == data[0]).execute()
return cycle_datas
def get_cycle_new_case_cleared(begin_date, end_date):
sql = (
"""
SELECT
'1' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.created_at ) = date( br.repay_at )
WHERE ba.created_at > '%s'
AND ba.created_at < '%s'
GROUP BY 1, 2 ) a
UNION
SELECT
'2' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
GROUP BY 1, 2) a
"""
% (begin_date, end_date, begin_date, end_date))
cycle_cleared = run_all_sql(sql)
for i in cycle_cleared:
SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber
.cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.
time == begin_date).execute()
def get_cycle_case_made_cnt(begin_date, end_date):
sql = (
"""
select cycle,count(distinct application) from (
select distinct cycle,application from bomber.auto_call_list_record
where created_at >= '%s'
and created_at < '%s'
and called_counts <> 0
and cycle in (1,2,3,4)
union
select distinct cycle,application_id from bomber.call_actions
where created_at >= '%s'
and created_at < '%s'
and cycle in (1,2,3,4)
) c
group by 1
"""
% (begin_date, end_date, begin_date, end_date))
case_made_datas = run_all_sql(sql)
for case_made_data in case_made_datas:
SummaryBomber.update(case_made_cnt=case_made_data[1]).where(
SummaryBomber.time == begin_date, SummaryBomber.cycle ==
case_made_data[0], SummaryBomber.bomber_id == case_made_data[0]
).execute()
@action(MessageAction.SUMMARY_NEW_CYCLE)
def summary_new_cycle(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW_CYCLE').first()
if worker_log and worker_log.logs >= 5:
return
cycle_datas = SummaryBomber.select(fn.SUM(SummaryBomber.
new_case_amount_sum).alias('new_case_amount_sum'), fn.SUM(
SummaryBomber.new_case_cleared_sum).alias('new_case_cleared_sum'),
fn.SUM(SummaryBomber.case_made_cnt).alias('case_made_cnt'), fn.SUM(
SummaryBomber.case_connect_cnt).alias('case_connect_cnt'), fn.SUM(
SummaryBomber.promised_cnt).alias('promised_cnt'), fn.SUM(
SummaryBomber.promised_amount).alias('promised_amount'), fn.SUM(
SummaryBomber.cleared_cnt).alias('cleared_cnt'), fn.SUM(
SummaryBomber.cleared_amount).alias('cleared_amount'), fn.SUM(
SummaryBomber.new_case_cnt).alias('new_case_cnt'), fn.SUM(
SummaryBomber.new_case_call_cnt).alias('new_case_call_cnt'), fn.SUM
(SummaryBomber.unfollowed_cnt).alias('unfollowed_cnt'), fn.SUM(
SummaryBomber.unfollowed_call_cnt).alias('unfollowed_call_cnt'), fn
.SUM(SummaryBomber.call_cnt).alias('call_cnt'), fn.SUM(
SummaryBomber.sms_cnt).alias('sms_cnt'), fn.SUM(SummaryBomber.
call_connect_cnt).alias('call_connect_cnt'), fn.SUM(SummaryBomber.
ptp_today_cnt).alias('ptp_today_cnt'), fn.SUM(SummaryBomber.
ptp_today_call_cnt).alias('ptp_today_call_cnt'), fn.SUM(
SummaryBomber.ptp_next_cnt).alias('ptp_next_cnt'), fn.SUM(
SummaryBomber.ptp_next_call_cnt).alias('ptp_next_call_cnt'), fn.SUM
(SummaryBomber.KP_cleared_cnt).alias('KP_cleared_cnt'), fn.SUM(
SummaryBomber.KP_today_cnt).alias('KP_today_cnt'), fn.SUM(
SummaryBomber.work_ind).alias('work_ind'), fn.SUM(SummaryBomber.
calltime_sum).alias('calltime_sum'), fn.SUM(SummaryBomber.
calltime_case_sum).alias('calltime_case_sum'), fn.SUM(SummaryBomber
.calltime_case_cnt).alias('calltime_case_cnt'), fn.SUM(
SummaryBomber.calltime_no_case_sum).alias('calltime_no_case_sum'),
        fn.SUM(SummaryBomber.calltime_no_case_cnt).alias(
        'calltime_no_case_cnt'), fn.SUM(SummaryBomber.work_time_sum).alias(
        'work_time_sum'), SummaryBomber.cycle.alias('cycle')).where(
SummaryBomber.time == begin_date, SummaryBomber.cycle << Cycle.values()
).group_by(SummaryBomber.cycle)
for cycle_data in cycle_datas:
SummaryBomber.create(bomber_id=cycle_data.cycle, time=begin_date,
cycle=cycle_data.cycle, new_case_amount_sum=cycle_data.
new_case_amount_sum, new_case_cleared_sum=cycle_data.
new_case_cleared_sum, new_case_cleard_rate=0, case_made_cnt=
cycle_data.case_made_cnt, case_made_rate=0, case_connect_cnt=
cycle_data.case_connect_cnt, case_connect_rate=0, promised_cnt=
cycle_data.promised_cnt, promised_amount=cycle_data.
promised_amount, cleared_cnt=cycle_data.cleared_cnt,
cleared_amount=cycle_data.cleared_amount, new_case_cnt=
cycle_data.new_case_cnt, new_case_call_cnt=cycle_data.
new_case_call_cnt, unfollowed_cnt=cycle_data.unfollowed_cnt,
unfollowed_call_cnt=cycle_data.unfollowed_call_cnt, call_cnt=
cycle_data.call_cnt, sms_cnt=cycle_data.sms_cnt,
call_connect_cnt=cycle_data.call_connect_cnt, calltime_case_avg
=0, ptp_today_cnt=cycle_data.ptp_today_cnt, ptp_today_call_cnt=
cycle_data.ptp_today_call_cnt, ptp_next_cnt=cycle_data.
ptp_next_cnt, ptp_next_call_cnt=cycle_data.ptp_next_call_cnt,
KP_cleared_cnt=cycle_data.KP_cleared_cnt, KP_today_cnt=
cycle_data.KP_today_cnt, KP_cleared_rate=0, work_ind=cycle_data
.work_ind, calltime_sum=cycle_data.calltime_sum,
calltime_case_sum=cycle_data.calltime_case_sum,
calltime_case_cnt=cycle_data.calltime_case_cnt,
calltime_no_case_sum=cycle_data.calltime_no_case_sum,
calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,
work_time_sum=cycle_data.work_time_sum)
cycle_claimed = get_cycle_claimed(begin_date, end_date)
for claimed in cycle_claimed:
SummaryBomber.update(claimed_cnt=claimed[1]).where(SummaryBomber.
time == begin_date, SummaryBomber.cycle == claimed[0],
SummaryBomber.bomber_id == claimed[0]).execute()
cycle_new_case(begin_date, end_date)
get_cycle_new_case_call(begin_date, end_date)
get_cycle_new_case_cleared(begin_date, end_date)
get_cycle_case_made_cnt(begin_date, end_date)
all_datas = SummaryBomber.filter(SummaryBomber.time == begin_date)
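    # backfill the ratio fields now that every component sum has been written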
for data in all_datas:
cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum if
data.new_case_amount_sum else 0) * 100
data.new_case_cleard_rate = cl_rat
case_made_rate = (data.case_made_cnt / data.claimed_cnt if data.
claimed_cnt else 0) * 100
data.case_made_rate = case_made_rate
case_connect_rate = (data.case_connect_cnt / data.case_made_cnt if
data.case_made_cnt else 0) * 100
data.case_connect_rate = case_connect_rate
calltime_case_avg = (data.calltime_case_sum / data.
calltime_case_cnt if data.calltime_case_cnt else 0)
data.calltime_case_avg = calltime_case_avg
calltime_no_case_avg = (data.calltime_no_case_sum / data.
calltime_no_case_cnt if data.calltime_no_case_cnt else 0)
data.calltime_no_case_avg = calltime_no_case_avg
KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt if data.
KP_today_cnt else 0) * 100
data.KP_cleared_rate = KP_cleared_rate
data.save()
def get_change_bomber():
    cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value, 8: Cycle.C3.value}
result = {}
bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,
BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=
BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.
created_at) == date.today(), BomberLog.role_id << list(
cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.
instalment == 0).dicts()
for b_log in bomber_logs:
cycle = cycle_role_map.get(b_log['role_id'])
group_id = b_log['group_id']
if cycle in result:
if group_id not in result[cycle]:
result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],
'new_ids': []}
else:
result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],
'new_ids': []}}
if b_log['operation'] == 0:
result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])
if result:
bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map
.keys())), Bomber.is_del == 0, Bomber.instalment == 0)
for b in bombers:
cycle_result = result.get(cycle_role_map[b.role_id], {})
role_result = cycle_result.get(b.group_id)
if not role_result:
continue
role_result['new_ids'].append(b.id)
        result_list = []
        for cycle, group_dict in result.items():
            result_list.extend(list(group_dict.values()))
        return result_list
return []
def get_average_number(app_nums, bomber_nums):
average = app_nums // bomber_nums
remainder = app_nums % bomber_nums
average_list = [average for i in range(bomber_nums)]
if remainder == 0:
return average_list
for i in range(remainder):
average_list[i] += 1
random.shuffle(average_list)
return average_list
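# Example: get_average_number(10, 3) builds [4, 3, 3] (the first `remainder`
# bombers each take one extra application) and returns it shuffled.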
def get_surplus_application(new_ids, del_ids, average_nums, classified_apps):
surplus_apps = []
for del_id in del_ids:
del_res = classified_apps.get(del_id, {})
p_list = del_res.get('p_list', [])
np_list = del_res.get('np_list', [])
del_res['need_num'] = -(len(p_list) + len(np_list))
del_res['to_list'] = np_list + p_list
surplus_apps.extend(p_list)
surplus_apps.extend(np_list)
for index, bid in enumerate(new_ids):
average = average_nums[index]
bomber_app = classified_apps.get(bid)
if not bomber_app:
bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid
).first()
bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':
[], 'np_num': 0, 'need_num': average, 'partner_id': bomber.
partner_id if bomber else ''}
classified_apps[bid] = bomber_app
else:
p_num = len(bomber_app['p_list'])
np_num = len(bomber_app['np_list'])
if p_num > average:
bomber_app['need_num'] = -np_num
else:
bomber_app['need_num'] = average - (p_num + np_num)
bomber_app['p_num'] = p_num
bomber_app['np_num'] = np_num
if bomber_app['need_num'] < 0:
random.shuffle(bomber_app['np_list'])
res_over = bomber_app['np_list'][:-bomber_app['need_num']]
bomber_app['to_list'] = res_over
surplus_apps.extend(res_over)
classified_apps_list = sorted(classified_apps.values(), key=lambda x: x
['need_num'], reverse=True)
return surplus_apps, classified_apps_list
@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)
def bomber_dispatch_applications(payload, msg_id):
change_bombers = get_change_bomber()
instalment_change_bombers = get_instalment_change_bomber()
params = {ApplicationType.CASH_LOAN.value: change_bombers,
ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}
for type, bombers in params.items():
change_bomber_dispatch_apps(change_bombers=bombers, type=type)
@action(MessageAction.REPAIR_BOMBER)
def repair_bomber(payload, msg_id):
app_mobile = payload['app_mobile']
username = payload.get('user_name')
logging.info('start repair bomber, number: %s' % app_mobile)
if 'mobile_no' in payload and payload['mobile_no']:
mobile = number_strip(str(payload['mobile_no']))[:64]
name = payload.get('mobile_name')
application = Application.filter(Application.user_mobile_no == mobile)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, mobile, username, name)
if 'tel_no' in payload and payload['tel_no']:
tel_no = number_strip(str(payload['tel_no']))[:64]
name = payload.get('tel_name')
application = Application.filter(Application.user_mobile_no == tel_no)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, tel_no, username, name)
def get_summary_daily_time():
mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')
mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')
now_date = datetime.now()
now_date_time = now_date.time()
today_str = str(now_date.date())
if now_date_time < mid_time_t1.time():
yes_date = now_date - timedelta(days=1)
yes_date_str = str(yes_date.date())
begin_str = yes_date_str + ' 17:20:00'
end_str = today_str + ' 00:00:00'
elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():
begin_str = today_str + ' 00:00:00'
end_str = today_str + ' 12:40:00'
else:
begin_str = today_str + ' 12:40:00'
end_str = today_str + ' 17:20:00'
begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')
end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')
summary_datetime = now_date - timedelta(minutes=30)
summary_date = summary_datetime.date()
return begin_time, end_time, summary_date
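# The day is split into three reporting windows around 12:40 and 17:20:
#   now <  12:40          -> [yesterday 17:20, today 00:00)
#   12:40 <= now < 17:20  -> [today 00:00, today 12:40)
#   now >= 17:20          -> [today 12:40, today 17:20)
# summary_date is taken 30 minutes before now, presumably so a run shortly
# after midnight still attributes its data to the previous day.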
@action(MessageAction.SUMMARY_DAILY)
def summary_daily_data(payload, msg_id):
begin_time, end_time, summary_date = get_summary_daily_time()
call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.
bomber_id, CallActionsR.application_id, CallActionsR.promised_date,
CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(
CallActionsR.created_at >= begin_time, CallActionsR.created_at <
end_time, CallActionsR.type << (0, 1))
summary_dailys = {}
for call in call_actions:
if call.bomber_id not in summary_dailys:
summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.
bomber_id, 'summary_date': str(summary_date)}
if call.name and call.number:
summary_dailys[call.bomber_id]['call_cnt'] += 1
if call.promised_date:
summary_dailys[call.bomber_id]['ptp_cnt'] += 1
C1_sql = (
"""
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,a.cycle
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,2,4,5)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
"""
% (begin_time, end_time))
C1_repayment = run_all_sql(C1_sql)
other_sql = (
"""
select current_bomber_id,sum(pay_amount) as pay_amount,cycle
from (
select application_id,current_bomber_id,pay_amount,repay_at,cycle
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at,
br.cycle
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (3,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
"""
% (begin_time, end_time))
other_repayment = run_all_sql(other_sql)
all_repayment = C1_repayment + other_repayment
for res in all_repayment:
bomber_id, pay_amount, cycle = res
if bomber_id in summary_dailys:
summary_dailys[bomber_id]['repayment'] += pay_amount
else:
summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': cycle, 'repayment': pay_amount, 'bomber_id':
bomber_id, 'summary_date': str(summary_date)}
insert_values = list(summary_dailys.values())
if insert_values:
SummaryDaily.insert_many(insert_values).execute()
def get_app_logs(apps):
app_logs = {}
all_np_apps = []
all_p_apps = []
for a in apps:
latest_bomber = a['latest_bomber']
latest_bomber = a['cycle'] if not latest_bomber else latest_bomber
if latest_bomber in app_logs:
app_logs[latest_bomber]['to_ids'].append(a['id'])
else:
app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':
[a['id']], 'np_ids': [], 'p_ids': []}
if a['promised_date'] and a['promised_date'].date() >= datetime.now(
).date():
app_logs[latest_bomber]['p_ids'].append(a['id'])
all_p_apps.append(a)
else:
app_logs[latest_bomber]['np_ids'].append(a['id'])
all_np_apps.append(a)
return app_logs, all_np_apps, all_p_apps
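# Grouping rule above: an app whose promised_date is today or later is counted
# under its latest_bomber's 'p_ids' (kept PTP); everything else goes to
# 'np_ids'. Apps without a latest_bomber fall back to the cycle number as the
# grouping key.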
def month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):
apps = list(apps)
np_apps = list(np_apps)
random.shuffle(np_apps)
apps_len = len(apps)
np_apps_len = len(np_apps)
end = 0
all_app_precentage = 0
partners = Partner.select().where(Partner.cycle == cycle, Partner.
status == PartnerStatus.NORMAL.value)
for p in partners:
all_app_precentage += p.app_percentage
for partner in partners:
bombers = Bomber.select().where(Bomber.partner == partner.id,
Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.
value)
bids = {b.id: b for b in bombers}
if len(bids) == 0:
logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))
continue
start = end
if np_apps_len >= int(apps_len * all_app_precentage):
end = start + int(apps_len * partner.app_percentage)
else:
end = start + int(np_apps_len * partner.app_percentage /
all_app_precentage)
partner_app = np_apps[start:end]
dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)
np_apps = np_apps[end:]
return np_apps
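# Worked example of the proportional split above (hypothetical numbers): with
# apps_len=1000, np_apps_len=600 and two partners at app_percentage 0.2 and
# 0.3 (all_app_precentage=0.5), 600 >= int(1000 * 0.5), so partner 1 takes
# np_apps[0:200], partner 2 takes np_apps[200:500], and np_apps[500:] is
# returned for inner dispatch.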
def month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):
    sys_cycle = {1: 'AB_TEST_C1A', 2: 'AB_TEST_C1B', 3: 'AB_TEST_C2',
                 4: 'AB_TEST_C3'}
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0
)
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
bombers = bombers.where(Bomber.instalment == 0)
bids = {b.id: b for b in bombers}
if cycle == Cycle.C1A.value:
np_ids = [a['id'] for a in np_apps]
np = Application.update(status=ApplicationStatus.PROCESSING.value,
ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids
).execute()
bomber_app_logs = app_logs.get(cycle, {})
out_param = {'application_ids': bomber_app_logs.get('to_ids', []),
'month_dispatch': 1, 'src_bomber_id': cycle}
new_out_record(**out_param)
in_param = {'cycle': cycle, 'application_ids': np_ids,
'dest_bomber_id': cycle}
new_in_record(**in_param)
bomber_app_logs['need_num'] = len(np_apps)
bomber_app_logs['form_ids'] = np_ids
bomber_app_logs['status'] = 1
else:
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)
dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)
def dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,
type=ApplicationType.CASH_LOAN.value):
apps = list(apps)
random.shuffle(apps)
bids_list = list(bids.keys())
if len(bids_list) <= 0:
logging.info('get_dispatch_app_to_bomber no bids')
return
average_num = get_average_number(len(apps), len(bids_list))
bomber_end = 0
with db.atomic():
for index, bid in enumerate(bids_list):
current_bomber = bids.get(bid)
bomber_app_logs = app_logs.get(bid, {})
bomber_start = bomber_end
bomber_end = bomber_start + average_num[index]
bomber_apps = apps[bomber_start:bomber_end]
from_p, from_np, from_ids, status = [], [], [], 0
for ba in bomber_apps:
promised_date = ba.get('promised_date')
from_ids.append(ba['id'])
if promised_date and promised_date.date() >= date.today():
from_p.append(ba['id'])
else:
from_np.append(ba['id'])
app_status = ApplicationStatus.AB_TEST.value
if (cycle == Cycle.C1A.value and not out_partner and type ==
ApplicationType.CASH_LOAN.value):
app_status = ApplicationStatus.PROCESSING.value
if from_p:
p = Application.update(ptp_bomber=bid, latest_bomber=bid,
status=app_status).where(Application.id << from_p).execute(
)
p_ids = bomber_app_logs.get('p_ids', []) + from_p
bomber_app_logs['p_ids'] = p_ids
if from_np:
np = Application.update(latest_bomber=bid, ptp_bomber=None,
status=ApplicationStatus.AB_TEST.value).where(
Application.id << from_np).execute()
np_ids = bomber_app_logs.get('np_ids', []) + from_np
bomber_app_logs['np_ids'] = np_ids
in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.
partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}
if type == ApplicationType.CASH_LOAN.value:
out_param = {'src_bomber_id': bid, 'application_ids':
bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}
new_out_record(**out_param)
new_in_record(**in_param)
else:
out_and_in_record_instalment(**in_param)
bomber_app_logs['status'] = 1
need_num = bomber_app_logs.get('need_num', 0) + average_num[index]
bomber_app_logs['need_num'] = need_num
all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids
bomber_app_logs['form_ids'] = all_form_ids
if not out_partner:
continue
try:
DispatchApp.delete().where(DispatchApp.application.in_(
from_ids)).execute()
dispatch_ins = [{'application': id, 'partner':
current_bomber.partner_id, 'bomber': bid, 'status':
DisAppStatus.NORMAL.value} for id in from_ids]
DispatchApp.insert_many(dispatch_ins).execute()
except Exception as e:
logging.info(
'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str
(e), bid, from_ids))
def calc_entry_time(overdue_days):
app_entry_time = {}
overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],
'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}
for key, value in overdue_entry.items():
if value[0] <= overdue_days <= value[1]:
app_entry_time[key] = datetime.now()
else:
app_entry_time[key] = None
return app_entry_time
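# Example: calc_entry_time(15) sets only 'C1B_entry' to datetime.now()
# (11 <= 15 <= 30); the other four entry keys stay None.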
@action(MessageAction.SUMMARY_BOMBER_OVERDUE)
def summary_bomber_overdue_everyday(payload, msg_id):
cycle_list = Cycle.values()
which_day = date.today()
for cycle in cycle_list:
apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,
ApplicationR.ptp_bomber, ApplicationR.overdue_days,
ApplicationR.promised_date, ApplicationR.follow_up_date,
ApplicationR.external_id, OverdueBillR.status, OverdueBillR.
periods, OverdueBillR.sub_bill_id).join(OverdueBillR,
JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id
).where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()
bomber_overdue_list = []
for app in apps:
status = app.get('status')
if status == ApplicationStatus.REPAID.value:
continue
ptp_bomber = app.get('ptp_bomber')
promised_date = app.get('promised_date')
follow_up_date = app.get('follow_up_date')
if not promised_date or promised_date.date() < date.today():
ptp_bomber = promised_date = None
if not follow_up_date or follow_up_date.date() < date.today():
follow_up_date = None
overdue_dict = {'collection_id': app.get('id'), 'external_id':
app.get('external_id'), 'sub_bill_id': app.get(
'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app
.get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':
ptp_bomber, 'promised_date': promised_date,
'follow_up_date': follow_up_date, 'which_day': which_day,
'overdue_days': app.get('overdue_days')}
bomber_overdue_list.append(overdue_dict)
try:
if bomber_overdue_list:
with db.atomic():
for index in range(0, len(bomber_overdue_list), 1000):
insert_list = bomber_overdue_list[index:index + 1000]
BomberOverdue.insert_many(insert_list).execute()
except Exception as e:
logging.info(
'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %
(cycle, str(which_day), str(e)))
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)
def today_ptp_auto_call_switch(payload, msg_id):
today = datetime.today().date()
next_day = today + timedelta(days=1)
apps = ApplicationR.select(ApplicationR.latest_bomber).where(
ApplicationR.promised_date < next_day, ApplicationR.promised_date >=
today, ApplicationR.promised_date.is_null(False), ApplicationR.
status != ApplicationStatus.REPAID.value, ApplicationR.cycle <
Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(
ApplicationR.latest_bomber)
bids = [a.latest_bomber_id for a in apps]
if not bids:
return
q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(
BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids
).execute()
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)
def update_today_switch_every_day(payload, msg_id):
q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(
BomberPtp.auto_ext.is_null(False)).execute()
import traceback
from functools import partial
import json
import logging
from collections import defaultdict
from itertools import cycle as CycleIter
from datetime import datetime, date, timedelta
from decimal import Decimal
import random
from copy import deepcopy
from math import ceil
import boto3
import bottle
from peewee import fn, SQL, JOIN_LEFT_OUTER, JOIN_INNER, R
from mongoengine import Q
from deprecated.sphinx import deprecated
from bomber.api import (
AccountService,
MessageService,
AuditService,
BillService,
Dashboard,
GoldenEye,
Hyperloop,
Message,
Scout)
from bomber.constant_mapping import (
AutoCallMessageCycle,
ApplicationStatus,
RealRelationship,
BomberCallSwitch,
CallActionCommit,
ApplicantSource,
ApplicationType,
EscalationType,
ApprovalStatus,
AutoListStatus,
AutoCallResult,
BeforeInBomber,
PriorityStatus,
InboxCategory,
OldLoanStatus,
BombingResult,
ContactStatus,
SpecialBomber,
PartnerStatus,
Relationship,
ConnectType,
SubRelation,
PhoneStatus,
ContactType,
SmsChannel,
ContainOut,
FIRSTLOAN,
AppName,
RipeInd,
Cycle,
ContactsUseful,
DisAppStatus,
BomberStatus,
PartnerType)
from bomber.controllers.templates import cs_number_conf
from bomber.controllers.report_calculation.collection_tool import (
average_call_duration_team
)
from bomber.controllers.report_calculation.collection_agent import get_agent
from bomber.db import db, readonly_db
from bomber.models_readonly import (
DispatchAppHistoryR,
AutoCallActionsR,
ConnectHistoryR,
ApplicationR,
CallActionsR,
OverdueBillR,
BomberR)
from bomber.models import (
ManualCallListStatus,
RepaymentReportInto,
OldLoanApplication,
DispatchAppHistory,
CompanyContactType,
FamilyContactType,
ReportCollection,
RepaymentReport,
AutoCallActions,
DispatchAppLogs,
ConnectHistory,
BombingHistory,
ManualCallList,
AutoIVRActions,
SummaryBomber,
SummaryDaily,
IVRCallStatus,
BomberOverdue,
AutoCallList,
AutoIVRStatus,
SystemConfig,
RepaymentLog,
IVRActionLog,
TotalContact,
Application,
CallActions,
DispatchApp,
OverdueBill,
Escalation,
BomberPtp,
WorkerLog,
BomberLog,
CycleList,
Template,
Transfer,
Summary2,
AutoIVR,
Partner,
Contact,
CallLog,
Summary,
Bomber,
Inbox,
Role,
SCI,
)
from bomber.sns import MessageAction, send_to_default_q
from bomber.utils import (
get_cycle_by_overdue_days,
str_no_utc_datetime,
no_utc_datetime,
gender_ktpnum,
list_to_dict,
birth_dt_ktp,
number_strip,
utc_datetime,
OperatedDict,
average_gen,
time_logger,
idg,
)
from bomber.report_work import get_every_cycle_report
app = bottle.default_app()
client = boto3.client('sqs')
# registry exposed to the dispatcher: key = action name, value = list of handler functions
actions = {}
def action(msg_action):
action_name = msg_action.value.lower()
if action_name not in actions:
actions[action_name] = []
def wrapper(func):
actions[action_name].append(func)
return func
return wrapper
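# A handler registers itself under the lowercased action name; a dispatcher
# elsewhere presumably looks the name up and invokes every registered handler,
# along these lines (sketch, not the author's dispatch code):
#
#     for handler in actions.get(msg_action_name, []):
#         handler(payload, msg_id)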
@action(MessageAction.BOMBER_HEALTH_CHECK)
def health_check(payload, msg_id):
pass
def dpd1_classify(item, lst):
app_name = str(item['app_name']).upper()
key = '{}_{}_DPD1'.format(app_name, str(item['su']))
if key in BeforeInBomber.keys():
lst[key].append(item['id'])
return lst
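# Example: {'app_name': 'DanaCepat', 'su': 0, 'id': 123} yields the key
# 'DANACEPAT_0_DPD1'; 123 is appended to lst[key] only when that key exists in
# BeforeInBomber.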
def dpd1_process(lst):
"""已废弃的方法"""
if not lst:
return
for key, l in lst.items():
rule = getattr(BeforeInBomber, key).value
query = (AutoIVRActions
.select(fn.DISTINCT(AutoIVRActions.loanid))
.where(AutoIVRActions.loanid.in_(l),
AutoIVRActions.group.in_(rule.get('group')),
AutoIVRActions.callstate
.in_(IVRCallStatus.call_success())))
success_set = {i.loanid for i in query}
failed_list = list(set(l) - success_set)
post_params = {
'$and': rule.get('$and'),
'app_list': failed_list
}
resp = Hyperloop().post("/bomber/score/verify", json=post_params)
if not resp.ok:
logging.error(
'hyperloop score verification failed: %s, %s',
str(resp.status_code),
str(resp.text)
)
logging.error('hyperloop score verification failed: %s',
str(post_params))
continue
logging.debug('hyperloop score verification success: %s', resp.content)
resp_json = resp.json()
        # dpd1: pull these applications into bomber ahead of schedule
app_list = resp_json['data']
if not app_list:
continue
for item in app_list:
            # A/B test: a random subset of users (about one in six) enters collection early
if random.randint(0, 5) == 1:
send_to_default_q(
MessageAction.APPLICATION_BOMBER,
{'id': int(item)}
)
# auto_ivr: automated outbound-call (IVR) system
@action(MessageAction.GET_IVR)
def get_ivr(payload, msg_id):
logging.warning('start get_ivr')
sys_config = (SystemConfig.select()
.where(SystemConfig.key == 'DPD1-3_INTO_IVR')
.first())
    # collect all the loan ids
now = date.today()
    # users are expected to enter collection directly instead of IVR, so the
    # time window was adjusted to stop fetching pre-due data
if sys_config and sys_config.value:
start = now - timedelta(days=3)
else:
start = now
end = now + timedelta(days=4)
    # TODO: use redis
item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()
if not item:
        # clear old IVR data at the start
AutoIVR.delete().execute()
current_page = 0
elif item.current_page >= item.total_page:
return
else:
current_page = item.current_page
    # overdue grouping: app name + overdue count + days overdue
auto_ivr = {
'DanaCepat01': 1,
'DanaCepat00': 2,
'DanaCepat0PDP1': 3,
'PinjamUang01': 4,
'PinjamUang00': 5,
'PinjamUang0PDP1': 6,
'KtaKilat01': 7,
'KtaKilat00': 8,
'KtaKilat0PDP1': 9,
'DanaCepat11': 10,
'DanaCepat10': 11,
'DanaCepat1PDP1': 12,
'PinjamUang11': 13,
'PinjamUang10': 14,
'PinjamUang1PDP1': 15,
'KtaKilat11': 16,
'KtaKilat10': 17,
'KtaKilat1PDP1': 18,
'DanaCepat0PDP2': 19,
'DanaCepat0PDP3': 20,
'DanaCepat03': 21,
'PinjamUang0PDP2': 22,
'PinjamUang0PDP3': 23,
'PinjamUang03': 24,
'KtaKilat0PDP2': 25,
'KtaKilat0PDP3': 26,
'KtaKilat03': 27,
'DanaCepat1PDP2': 28,
'DanaCepat1PDP3': 29,
'PinjamUang1PDP2': 30,
'PinjamUang1PDP3': 31,
'KtaKilat1PDP2': 32,
'KtaKilat1PDP3': 33,
'DanaCepat13': 36,
'PinjamUang13': 37,
'KtaKilat13': 38,
'DanaCepat12': 39,
'PinjamUang12': 40,
'KtaKilat12': 41,
'DanaCepat02': 42,
'PinjamUang02': 43,
'KtaKilat02': 44,
'IKIDana01': 100,
'IKIDana00': 101,
'IKIDana0PDP1': 102,
'IKIDana11': 103,
'IKIDana10': 104,
'IKIDana1PDP1': 105,
'IKIDana0PDP2': 106,
'IKIDana0PDP3': 107,
'IKIDana03': 108,
'IKIDana1PDP2': 109,
'IKIDana1PDP3': 110,
'IKIDana13': 111,
'IKIDana12': 112,
'IKIDana02': 113,
}
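    # group key = app_name + su + time, where time is the days until due or
    # 'PDP<n>' for n days past due; e.g. 'DanaCepat' + '0' + 'PDP1' -> group 3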
current_page += 1
with db.atomic() as transaction:
while True:
bill_service = BillService()
            # fetch bills with bill_sub.origin_due_at from today through the next 4 days
ivr_action = bill_service.ivr_pages(
page=current_page,
page_size=500,
start_time=utc_datetime(str(start)),
end_time=utc_datetime(str(end)))
result = ivr_action['result']
page_size = int(ivr_action.get('page_size', 0))
total_page = int(ivr_action.get('total_page', 0))
insert_args = []
for a in result:
due_at = no_utc_datetime(a['due_at'])
days = (due_at.date() - now).days
if days == 2:
continue
if days > 0:
time = str(days)
else:
                    # the request window above excludes records exactly two days overdue
time = str(days).replace('-', 'PDP')
                # su: how many times this user has been overdue
key = a['app_name'] + str(a['su']) + time
group = auto_ivr.get(key)
user_id = a['user_id']
try:
user_resp = (AccountService()
.get_user(path_params={'user_id': user_id}))
if str(user_resp['mobile_no']) == str(a['user_mobile_no']):
numbers = a['user_mobile_no']
else:
numbers = (a['user_mobile_no'] +
',' + user_resp.get('mobile_no'))
                except Exception:
logging.error('Request Account Service Error.')
numbers = a['user_mobile_no']
insert_args.append({
'application_id': a['id'],
'numbers': numbers,
'group': group,
'user_id': user_id})
AutoIVR.insert_many(insert_args).execute()
if current_page == 1:
IVRActionLog.create(total_page=total_page,
proc_date=now,
page_size=page_size,
current_page=current_page)
                # for unknown reasons, create() here does not return the new object
item = IVRActionLog.get(IVRActionLog.proc_date == now)
else:
item.current_page = current_page
item.page_size = page_size
item.total_page = total_page
item.save()
transaction.commit()
current_page += 1
if current_page > int(total_page):
break
# try:
# ivr_t2_test()
# except Exception as e:
# logging.error("ivr_test_error:%s"%str(e))
if sys_config and sys_config.value:
try:
classfiy_dpd_ptp_apps()
except Exception as e:
logging.error("dpd1-3_test_error:%s"%str(e))
# test code: route t-2 records into IVR
def ivr_t2_test():
t2_groups = [39, 40, 41, 42, 43, 44]
ivr_test_proportion = 0.2
sys_config = (SystemConfig.select()
.where(SystemConfig.key == 'IVR_TEST_PROPORTION')
.first())
if sys_config and sys_config.value:
ivr_test_proportion = float(sys_config.value)
    # fetch all t-2 records
t2_ivrs = (AutoIVR.select()
.where(AutoIVR.group << t2_groups,
AutoIVR.status == AutoIVRStatus.AVAILABLE.value))
t2_dict = defaultdict(list)
    # take a fixed proportion of records from each group
for ivr in t2_ivrs:
t2_dict[ivr.group].append(ivr.id)
test_ivr_ids = []
for group, ivr_ids in t2_dict.items():
number = ceil(len(ivr_ids) * ivr_test_proportion)
test_ivr_ids += ivr_ids[:number]
if not test_ivr_ids:
return
    # update IVR status
q = (AutoIVR.update(status=AutoIVRStatus.SUCCESS.value)
.where(AutoIVR.group << t2_groups,
AutoIVR.id.not_in(test_ivr_ids))
.execute())
# filter out applications that already have a PTP in bomber
def classfiy_dpd_ptp_apps():
dpd_group = AutoIVR.dpd_groups()
dpd1_3_ivr_pro = 0.2
sys_config = (SystemConfig.select()
.where(SystemConfig.key == 'DPD1-3_IVR_TEST')
.first())
if sys_config and sys_config.value:
dpd1_3_ivr_pro = float(sys_config.value)
    # fetch applications that already have a PTP
apps = (ApplicationR.select(ApplicationR.external_id)
.where(ApplicationR.overdue_days < 4,
ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.promised_date >= date.today(),
ApplicationR.promised_date.is_null(False)))
apps_ids = [a.external_id for a in apps]
    # remove PTP applications from IVR
if apps_ids:
d = (AutoIVR.delete()
.where(AutoIVR.application_id.in_(apps_ids),
AutoIVR.group.in_(dpd_group))
.execute())
    # all dpd1-3 records
ivrs = (AutoIVR.select().where(AutoIVR.group.in_(dpd_group)))
ivrs_dict = defaultdict(list)
for ivr in ivrs:
ivrs_dict[ivr.group].append(ivr.id)
test_ivrs = []
for group, ivr_ids in ivrs_dict.items():
number = ceil(len(ivr_ids) * dpd1_3_ivr_pro)
test_ivrs += ivr_ids[:number]
if not test_ivrs:
return
    # mark the non-test records as processed
q = (AutoIVR.update(status=AutoIVRStatus.SUCCESS.value)
.where(AutoIVR.group.in_(dpd_group),
AutoIVR.id.not_in(test_ivrs))
.execute())
# special handling for the APP merge
@action(MessageAction.APP_MERGE)
@deprecated(version='1.0', reason='This function will be removed soon')
def app_merge(payload, msg_id):
    # pull applications with DPD under 4 into bomber early
sql = """
select *
from (
select a.id as id
from dashboard.application as a
inner join repayment.bill2 as b on b.external_id = a.id
where not exists (
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.app = 'DanaCepat'
and a.is_first_loan = 1
and a.apply_at < '2018-08-23 20:50:00'
and b.overdue_days between 1 and 3
and b.status != 2) result
where not exists (
select 1
from bomber.application as a
where a.cycle = 1
and a.status = 4
and a.id = result.id
)
"""
cursor = readonly_db.get_cursor()
cursor.execute(sql)
new_data = cursor.fetchall()
cursor.close()
if new_data:
bomber = [103, 104]
for d in new_data:
app_id = {'id': d[0]}
application_overdue(app_id, None)
            # randomly assign the new applications to the designated collectors
(Application
.update(status=ApplicationStatus.AB_TEST.value,
latest_bomber=random.choice(bomber),
ptp_bomber=None
)
.where(Application.id == d[0])
).execute()
logging.warning('add new app success')
    # users who logged in again and have no PTP are removed from manual collection
ptp = date.today() - timedelta(days=1)
del_sql = """
select a.id
from bomber.application as a
where exists(
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.cycle = 1
and a.status = 4
and (a.promised_date is null or a.promised_date < "%s")
""" % ptp
cursor = readonly_db.get_cursor()
cursor.execute(del_sql)
del_date = cursor.fetchall()
cursor.close()
    if not del_date:
        return
ids = list()
for d in del_date:
ids.append(d[0])
(Application
.update(status=ApplicationStatus.UNCLAIMED.value,
latest_bomber=None)
.where(Application.id << ids)).execute()
@action(MessageAction.APPLICATION_BOMBER)
def application_overdue(payload, msg_id):
application_id = payload['id']
sub_bill_id = payload['bill_sub_id']
local_app = (Application.select()
.where(Application.external_id == application_id)
.order_by(Application.finished_at)
.first())
    # single-period loan whose collection record already exists
if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:
logging.info('application %s overdue, already exists', application_id)
add_contact(local_app)
return
    # for instalment loans, check whether the sub-bill already exists
if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:
overdue_bill = (OverdueBillR.select()
.where(OverdueBillR.sub_bill_id == sub_bill_id,
OverdueBillR.external_id == application_id))
if overdue_bill.exists():
logging.info(
'application %s,sub_bill_id %s overdue, already exists' %
(application_id, sub_bill_id))
return
try:
sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
sub_bill = sub_bill[0]
except Exception:
logging.error('application %s overdue, get sub_bill info failed:'
'Request To repayment Error', application_id)
return
if sub_bill['status'] == 2:
logging.error('application %s overdue, but bills already cleared',
application_id)
return
overdue_days = sub_bill.get('overdue_days', 0)
if overdue_days == 0:
logging.info('application {} no overdue'
.format(str(application_id)))
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
logging.error('get application %s failed: Request to GoldenEye.',
application_id)
return
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
apply_history = Dashboard().get('/users/%s/apply-history' % user_id)
if not apply_history.ok:
logging.error('get user %s apply history failed: Request '
'to Dashboard Failed.', user_id)
return
history = apply_history.json().get('data')
loan_success_times = len([1 for i in history
if i['status'] in [80, 90, 100, 70] and
i['id'] != gold_app['id']])
id = application_id
type = ApplicationType.CASH_LOAN.value
bill_id = sub_bill.get("bill_id")
amount = sub_bill.get("amount")
amount_net = sub_bill.get('amount_net')
interest_rate = sub_bill.get('interest_rate')
overdue_days = sub_bill.get('overdue_days')
origin_due_at = sub_bill.get('origin_due_at')
sub_overdue_bill = {
"collection_id": id,
"bill_id": bill_id,
"sub_bill_id": sub_bill_id,
"periods": sub_bill.get("periods"),
"overdue_days": overdue_days,
"origin_due_at": origin_due_at,
"amount": amount,
"amount_net": amount_net,
"interest_rate": interest_rate,
"external_id": application_id
}
    # generate the id according to the collection type
if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:
if local_app and local_app.status != ApplicationStatus.REPAID.value:
sub_overdue_bill["collection_id"] = local_app.id
local_app.amount += amount
local_app.amount_net += amount_net
local_app.save()
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info(
"application %s,sub_bill_id:%s overdue created" %
(application_id, sub_bill_id))
return
else:
id = idg()
type = ApplicationType.CASH_LOAN_STAGING.value
sub_overdue_bill["collection_id"] = id
ptp_info = BombingHistory.filter(BombingHistory.application == id).first()
promised_amount = ptp_info and ptp_info.promised_amount
promised_date = ptp_info and ptp_info.promised_date
application = Application.create(
id=id,
user_id=gold_app['user_id'],
user_mobile_no=gold_app['user_mobile_no'],
user_name=gold_app['id_name'],
app=gold_app['app'],
device_no=gold_app['device_no'],
contact=json.dumps(gold_app.get('contact')),
apply_at=gold_app.get('apply_date'),
id_ektp=gold_app.get('id_ektp'),
birth_date=birth_dt_ktp(gold_app.get('id_ektp')),
gender=gender_ktpnum(gold_app.get('id_ektp')),
profile_province=(gold_app.get('profile_province') or {}).get('name'),
profile_city=(gold_app.get('profile_city') or {}).get('name'),
profile_district=(gold_app.get('profile_district') or {}).get('name'),
profile_residence_time=gold_app.get('profile_residence_time'),
profile_residence_type=gold_app.get('profile_residence_type'),
profile_address=gold_app.get('profile_address'),
profile_education=gold_app.get('profile_education'),
profile_college=(gold_app.get('profile_college') or {}).get('name'),
job_name=gold_app.get('job_name'),
job_tel=gold_app.get('job_tel'),
job_bpjs=gold_app.get('job_bpjs'),
job_user_email=gold_app.get('job_user_email'),
job_type=gold_app.get('job_type'),
job_industry=gold_app.get('job_industry'),
job_department=gold_app.get('job_department'),
job_province=(gold_app.get('job_province') or {}).get('name'),
job_city=(gold_app.get('job_city') or {}).get('name'),
job_district=(gold_app.get('job_district') or {}).get('name'),
job_address=gold_app.get('job_address'),
amount=amount,
amount_net=amount_net,
interest_rate=interest_rate,
# late_fee_rate=bill.get('late_fee_rate'),
# late_fee_initial=late_fee_initial,
# late_fee=late_fee,
# interest=interest,
term=gold_app.get('term'),
origin_due_at=origin_due_at,
# due_at=bill.get('due_at'),
overdue_days=overdue_days,
repay_at=sub_bill.get('repay_at'),
# principal_paid=principal_paid,
# late_fee_paid=late_fee_paid,
# repaid=repaid,
# unpaid=unpaid,
loan_success_times=loan_success_times,
arrived_at=datetime.now(),
follow_up_date=datetime.now(),
promised_amount=promised_amount,
promised_date=promised_date,
external_id=application_id,
type=type,
bill_id=bill_id,
dpd1_entry=datetime.now()
)
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('overdue application %s created', application_id)
# new overdue application equals to 'escalate from 0 to 1'
Escalation.create(
application=id,
type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value,
current_cycle=0,
escalate_to=1,
)
add_contact(application)
def add_contact(application):
logging.info('start add contact for application: %s', application.id)
    # add contact information
contacts = Contact.filter(
Contact.user_id == application.user_id,
)
existing_numbers = {contact.number for contact in contacts}
insert_contacts = list()
mon_insert_contact = {}
# applicant
user_mobile_no = number_strip(application.user_mobile_no)
if user_mobile_no and user_mobile_no not in existing_numbers:
insert_contacts.append({
'user_id': application.user_id,
'name': application.user_name,
'number': user_mobile_no,
'relationship': Relationship.APPLICANT.value,
'source': 'apply info',
'real_relationship': Relationship.APPLICANT.value
})
existing_numbers.add(number_strip(application.user_mobile_no))
extra_phone = GoldenEye().get(
'/users/%s/extra-phone' % application.user_id
)
if not extra_phone.ok:
extra_phone = []
logging.error('get user %s extra contacts failed',
application.user_id)
else:
extra_phone = extra_phone.json()['data']
if extra_phone:
for i in extra_phone:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({
'user_id': application.user_id,
'name': application.user_name,
'number': number,
'relationship': Relationship.APPLICANT.value,
'source': 'extra phone',
'real_relationship': Relationship.APPLICANT.value
})
key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
# family
# ec contact
ec_contact = []
contact = json.loads(application.contact or '[]')
for i in contact:
if (number_strip(i['mobile_no']) not in existing_numbers and
number_strip(i['mobile_no'])):
ec_contact.append({
'user_id': application.user_id,
'name': i['name'],
'number': number_strip(i['mobile_no']),
'relationship': Relationship.FAMILY.value,
'sub_relation': SubRelation.EC.value,
'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value
})
key = (user_mobile_no,
number_strip(i['mobile_no']),
ContactType.F_EC.value)
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['mobile_no']))
if i['type'] != 1:
continue
if (number_strip(i['tel_no']) not in existing_numbers and
number_strip(i['tel_no'])):
ec_contact.append({
'user_id': application.user_id,
'name': i['name'],
'number': number_strip(i['tel_no']),
'relationship': Relationship.FAMILY.value,
'sub_relation': SubRelation.EC.value,
'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value
})
key = (user_mobile_no,
number_strip(i['tel_no']),
ContactType.F_EC.value)
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['tel_no']))
if ec_contact:
Contact.insert_many(ec_contact).execute()
# company
if all((application.job_tel,
number_strip(application.job_tel),
number_strip(application.job_tel) not in existing_numbers)):
insert_contacts.append({
'user_id': application.user_id,
'name': None,
'number': number_strip(application.job_tel),
'relationship': Relationship.COMPANY.value,
'source': 'basic info job_tel',
'real_relationship': Relationship.COMPANY.value
})
key = (user_mobile_no,
number_strip(application.job_tel),
ContactType.C_BASIC_INFO_JOB_TEL.value)
mon_insert_contact[key] = 1, 0, None
existing_numbers.add(number_strip(application.job_tel))
# suggested
sms_contacts = GoldenEye().get(
'/applications/%s/sms-contacts' % application.external_id
)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id)
else:
sms_contacts = sms_contacts.json()['data']
if sms_contacts:
for i in sms_contacts:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.SUGGESTED.value,
'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value
})
key = (user_mobile_no,
number,
ContactType.S_SMS_CONTACTS.value)
mon_insert_contact[key] = 1, 0, i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
cf = GoldenEye().get(
'/applications/%s/call/frequency' % application.external_id
)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
    # the structure differs, so rebuild the list
insert_contacts = []
fm = GoldenEye().get(
'/applications/%s/contact/family-member' % application.external_id
)
if not fm.ok:
family = []
logging.error('get application %s family-member info error',
application.external_id)
else:
family = fm.json()['data']
if family:
for i in family:
if not (i.get('number')):
logging.info('family member %s' % str(i))
continue
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
logging.info('family members: %s' % str(i))
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.FAMILY.value,
'source': FamilyContactType.CALLEC.value,
'total_count': i.get('total_count', 1),
'total_duration': i.get('total_duration', 0),
'real_relationship': Relationship.FAMILY.value
})
key = user_mobile_no, number, ContactType.F_CALL_EC.value
mon_insert_contact[key] = (i.get('total_count', 1),
i.get('total_duration', 0),
i['name'][:128])
existing_numbers.add(number)
mon_update_contact = {}
if call_frequency:
with db.atomic():
count = 1
for i in call_frequency:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
                    (Contact
                     .update(total_count=i['total_count'],
                             total_duration=i['total_duration'])
                     .where(Contact.number == number,
                            Contact.user_id == application.user_id)
                     .execute())
key = user_mobile_no, number
mon_update_contact[key] = (i['total_count'],
i['total_duration'])
continue
                # mark the five most-frequently-called numbers as family members
if count < 6:
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.FAMILY.value,
'total_count': i['total_count'],
'total_duration': i['total_duration'],
'source': FamilyContactType.CALLTOP5.value,
'real_relationship': Relationship.FAMILY.value
})
count += 1
key = user_mobile_no, number, ContactType.F_CALL_TOP5.value
mon_insert_contact[key] = (i['total_count'],
i['total_duration'],
i['name'][:128])
else:
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'],
'total_duration': i['total_duration'],
'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value
})
key = (user_mobile_no,
number,
ContactType.S_CALL_FREQUENCY.value)
mon_insert_contact[key] = (i['total_count'],
i['total_duration'],
i['name'][:128])
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
    # add credit-verification numbers to the applicant's own contacts
next_apply_list = (AccountService().add_contact(application.user_id))
for next_apply in next_apply_list:
number = number_strip(str(next_apply))[:64]
if number and number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=application.user_name,
number=number,
relationship=Relationship.SUGGESTED.value,
source='online profile phone',
real_relationship=Relationship.SUGGESTED.value
)
key = (user_mobile_no,
number,
ContactType.S_ONLINE_PROFILE_PHONE.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
    # add the other number of a dual-SIM phone to the applicant queue
next_applicant = GoldenEye().get(
'/bomber/%s/dual_contact' % application.user_id
)
if not next_applicant.ok:
next_applicant = []
logging.error('get user %s dual_contact contacts failed'
% application.user_id)
else:
next_applicant = next_applicant.json()['data']
if next_applicant:
for i in next_applicant:
number = number_strip(str(i))[:64]
if number and number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=application.user_name,
number=number,
relationship=Relationship.APPLICANT.value,
source='apply info',
real_relationship=Relationship.APPLICANT.value
)
key = user_mobile_no, number, ContactType.A_APPLY_INFO.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
    # add new contacts: numbers registered under the same KTP go to the
    # applicant's own contacts
numbers = []
try:
numbers = (AccountService()
.ktp_number(path_params={'user_id': application.user_id}))
except Exception as e:
logging.info('request ktp numbers failed %s' % str(e))
for n in numbers:
number = number_strip(str(n))[:64]
if number and number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=application.user_name,
number=number,
relationship=Relationship.APPLICANT.value,
source='ktp number',
real_relationship=Relationship.APPLICANT.value
)
key = (user_mobile_no,
number,
ContactType.A_KTP_NUMBER.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success'
% application.user_id)
    # mark contacts whose is_family is true as EC (emergency contact)
    ecs = None
    try:
        ecs = GoldenEye().get(
            '/applications/%s/contact/ec' % application.external_id
        )
    except Exception as e:
        logging.info('request ec-member error: %s' % str(e))
    try:
        if ecs is None or not ecs.ok:
ec = []
logging.info('get application %s ec-member info error',
application.external_id)
else:
ec = ecs.json()['data']
if ec:
for e in ec:
number = number_strip(e['numbers'])[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=e['name'][:128],
number=number,
relationship=Relationship.FAMILY.value,
source=FamilyContactType.CONTACTEC.value,
real_relationship=Relationship.FAMILY.value
)
key = (user_mobile_no,
number,
ContactType.F_CONTACT_EC.value)
mon_insert_contact[key] = 1, 0, e['name'][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add ec_member error:%s' % str(e))
    # mark contacts whose is_me is true as the applicant's own numbers
    mn = None
    try:
        mn = GoldenEye().get(
            '/applications/%s/contact/my_number' % application.external_id
        )
    except Exception as e:
        logging.info('request my_number error: %s' % str(e))
    try:
        if mn is None or not mn.ok:
my = []
logging.info('get application %s my_number info error',
application.external_id)
else:
my = mn.json()['data']
if my:
for m in my:
number = number_strip(m)[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=my[m][:128],
number=number,
relationship=Relationship.SUGGESTED.value,
source='my number',
real_relationship=Relationship.SUGGESTED.value
)
key = user_mobile_no, number, ContactType.S_MY_NUMBER.value
mon_insert_contact[key] = 1, 0, my[m][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add my_member error:%s' % str(e))
    # fetch the company's phone numbers
    cn = None
    try:
        cn = GoldenEye().get(
            '/applications/%s/contact/company-number' % application.external_id
        )
    except Exception as e:
        logging.info('request company-number error: %s' % str(e))
    try:
        if cn is None or not cn.ok:
cn = []
logging.info('get application %s company_number info error',
application.external_id)
else:
cn = cn.json()['data']
if cn:
for c in cn:
number = c
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=cn[c][:128],
number=number,
relationship=Relationship.COMPANY.value,
source='company',
real_relationship=Relationship.COMPANY.value
)
key = user_mobile_no, number, ContactType.C_COMPANY.value
mon_insert_contact[key] = 1, 0, cn[c][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add company_member error:%s' % str(e))
    # fetch SIM contacts from other devices the user has logged in on and add them to the applicant queue
try:
ol = (AccountService()
.other_login_contact(userId=application.user_id))
except Exception as e:
logging.error('request other_login error: %s' % e)
ol = {}
try:
for o in ol:
number = number_strip(o)
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=ol[o][:128],
number=number,
relationship=Relationship.SUGGESTED.value,
source='other_login',
real_relationship=Relationship.SUGGESTED.value
)
key = (user_mobile_no,
number,
ContactType.S_OTHER_LOGIN.value)
mon_insert_contact[key] = 1, 0, ol[o][:128]
except Exception as e:
logging.error('add other_login number error:%s' % e)
logging.info('add contact for application %s finished', application.id)
if mon_insert_contact or mon_update_contact:
send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON,
{
'user_mobile_no': user_mobile_no,
'insert_contact': str(mon_insert_contact),
'update_contact': str(mon_update_contact),
'user_id': application.user_id,
'name': application.user_name
})
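# A sketch of the IMPORT_CONTACT_TO_MON payload produced above (all values are
# illustrative, not taken from real data):
#   {
#       'user_mobile_no': '62812xxxx',
#       'insert_contact': "{('62812xxxx', '62813xxxx', <ContactType value>):"
#                         " (total_count, total_duration, dest_name)}",
#       'update_contact': "{('62812xxxx', '62813xxxx'):"
#                         " (total_count, total_duration)}",
#       'user_id': 1001,
#       'name': 'user name'
#   }
# Both contact dicts are stringified here and eval()-ed back in the consumer
# below.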
@action(MessageAction.IMPORT_CONTACT_TO_MON)
def import_contact_to_mon(payload, msg_id):
user_mobile_no = payload['user_mobile_no']
insert_contact = eval(payload['insert_contact'])
update_contact = eval(payload['update_contact'])
user_id = payload['user_id']
name = payload['name']
if not (insert_contact or update_contact or user_mobile_no):
logging.error("Invalid params")
drop_duplicated_contact({'numbers': [user_mobile_no]}, None)
send_to_default_q(MessageAction.CONTACT_FROM_TOTAL, {
'number': user_mobile_no,
'user_id': user_id
})
return
contacts = TotalContact.objects(src_number=user_mobile_no, is_calc=False)
insert_list = []
for c in contacts:
key = (user_mobile_no, c.dest_number, c.source)
if key in insert_contact:
insert_contact.pop(key)
for (sn, dn, s), (tc, td, na) in insert_contact.items():
insert_list.append({
'src_number': sn,
'src_name': name,
'dest_number': dn,
'dest_name': na,
'source': s,
'total_count': tc,
'total_duration': td
})
if insert_list:
insert_count = len((TotalContact
.objects
.insert([TotalContact(**dct)
for dct in insert_list])))
logging.info("insert success %s", insert_count)
update_count = 0
for (sn, dn), (tc, td) in update_contact.items():
result = (TotalContact
.objects(src_number=sn, dest_number=dn, is_calc=False)
.update(total_count=tc, total_duration=td))
if result:
update_count += 1
logging.info("update success %s", update_count)
drop_duplicated_contact({'numbers': [user_mobile_no]}, None)
send_to_default_q(MessageAction.CONTACT_FROM_TOTAL, {
'number': user_mobile_no,
'user_id': user_id
})
@action(MessageAction.DROP_DUPLICATED_CONTACT)
def drop_duplicated_contact(payload, msg_id):
"""
total_count,total_duration去重时,先total_count, 后total_duration
:param payload:
:param msg_id:
:return:
"""
numbers = payload.get('numbers', [])
    if not numbers:
        logging.error("no numbers to drop")
        return
query = (TotalContact
.objects(Q(src_number__in=numbers) | Q(dest_number__in=numbers)))
contact_list = defaultdict(list)
delete_list = []
insert_list = []
for c in query:
if c.src_number == c.dest_number:
delete_list.append(c.id)
key = c.src_number, c.dest_number, c.source
contact_list[key].append({
'id': c.id,
'src_number': c.src_number,
'dest_number': c.dest_number,
'total_count': c.total_count,
'total_duration': c.total_duration,
'is_calc': c.is_calc,
'source': c.source,
'src_name': c.src_name,
'dest_name': c.dest_name
})
contact_list2 = deepcopy(contact_list)
for key, info in contact_list.items():
_info = sorted(info,
key=lambda x: (not x['is_calc'],
x['total_count'],
x['total_duration']),
reverse=True)
rs = _info[0]
if not rs['is_calc']:
contact_list2[(key[1], key[0], key[2])].append({
'src_number': rs['dest_number'],
'dest_number': rs['src_number'],
'total_count': rs['total_count'],
'total_duration': rs['total_duration'],
'is_calc': True,
'source': rs['source'],
'id': '',
'src_name': rs['dest_name'],
'dest_name': rs['src_name']
})
delete_ids = [i['id'] for i in _info[1:] if i['id']]
delete_list.extend(delete_ids)
for key, info in contact_list2.items():
_info = sorted(info,
key=lambda x: (not x['is_calc'],
x['total_count'],
x['total_duration']),
reverse=True)
rs = _info[0]
        # the first pass already flushed every non-reversed record
if not rs['is_calc']:
continue
if not rs['id']:
rs.pop('id')
insert_list.append(rs)
delete_ids = [i['id'] for i in _info[1:] if i['id']]
delete_list.extend(delete_ids)
if delete_list:
delete_count = TotalContact.objects(id__in=delete_list).delete()
logging.info("numbers %s: delete success %s", numbers, delete_count)
if insert_list:
insert_count = len((TotalContact
.objects
.insert([TotalContact(**dct)
for dct in insert_list])))
logging.info("numbers %s: insert success %s", numbers, insert_count)
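# Worked example of the dedup above, with made-up numbers: given two rows for
# the same (src, dest, source) key, one (is_calc=False, total_count=5) and one
# (is_calc=True, total_count=9), the sort keeps the non-calc row, mirrors it
# into the reversed key (dest, src, source) as an is_calc=True candidate, and
# queues the losing row ids for deletion in the second pass.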
def get_contact_from_mongo(number):
if not number:
return []
query = (TotalContact
.objects(src_number=number,
source__in=TotalContact.available())
.order_by('source'))
lst = []
for c in query:
relation = TotalContact.relationship(c.source)
if relation == -1:
continue
source = TotalContact.str_source(c.source)
if not source:
continue
lst.append({
'related_number': c.dest_number,
'source': source,
'is_calc': c.is_calc,
'total_count': c.total_count,
'total_duration': c.total_duration,
'relation': relation,
'name': c.dest_name
})
return lst
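# Minimal usage sketch; the return shape follows the dict built above, while
# the concrete 'source' strings and 'relation' codes come from
# TotalContact.str_source/relationship and are assumptions here:
#   get_contact_from_mongo('62812xxxx')
#   -> [{'related_number': '62813xxxx', 'source': 'call frequency',
#        'is_calc': False, 'total_count': 3, 'total_duration': 120,
#        'relation': 1, 'name': 'some name'}, ...]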
@action(MessageAction.CONTACT_FROM_TOTAL)
def contact_from_total(payload, msg_id):
number = payload.get('number')
user_id = payload.get('user_id')
if not (number and user_id):
logging.error("Invalid params")
return
result = get_contact_from_mongo(number)
if not result:
logging.error("contact from mongo is none")
return
contacts = Contact.filter(Contact.user_id == user_id)
existing_numbers = {contact.number for contact in contacts}
contact_list = []
for c in result:
number = number_strip(c['related_number'])
if number in existing_numbers:
continue
contact_list.append({
'user_id': user_id,
'name': c['name'],
'number': number,
'relationship': c['relation'],
'source': c['source'],
'total_duration': c['total_duration'],
'total_count': c['total_count'],
'real_relationship': c['relation']
})
existing_numbers.add(number)
if contact_list:
Contact.insert_many(contact_list).execute()
@action(MessageAction.BILL_REVOKE)
def bill_revoke(payload, msg_id):
application_id = payload['external_id']
if 'bill_sub_id' not in payload:
bill_revoke_old(application_id)
return
    # sub-bill id
sub_bill_id = payload['bill_sub_id']
    # unique repayment identifier from the Java side
partner_bill_id = payload['partner_bill_id']
application = (Application
.filter(Application.external_id == application_id).first())
if application.type == ApplicationType.CASH_LOAN_STAGING.value:
        # look up the collection application id via the sub-bill
application = (Application.select(Application)
.join(OverdueBill,JOIN_LEFT_OUTER,
on = Application.id == OverdueBill.collection_id)
.where(OverdueBill.external_id == application_id,
OverdueBill.sub_bill_id == sub_bill_id)
.first())
if not application:
logging.info('application %s paid, not found application',
application_id)
return
try:
bill = BillService().sub_bill_list(bill_sub_ids = [sub_bill_id])
bill = bill[0]
except Exception:
logging.error('application %s overdue, get bill info failed: '
'Request To Repayment Error', application_id)
raise RuntimeError('Get repayment bills failed. {}'
.format(str(application.id)))
if bill.get('overdue_days') > 0 and bill.get('status') != 2:
Application.update(
status=ApplicationStatus.UNCLAIMED.value
).where(Application.id == application.id).execute()
    # fetch the sub-bill
overdue_bill = (OverdueBill
.filter(OverdueBill.external_id == application_id,
OverdueBill.sub_bill_id == sub_bill_id)
.first())
if not overdue_bill:
logging.info("not find overdue_bill,sub_bill_id:%s,appid:%s" %
(sub_bill_id, application_id))
return
if overdue_bill.status == ApplicationStatus.REPAID.value:
overdue_bill.status = ApplicationStatus.UNCLAIMED.value
overdue_bill.finished_at = None
overdue_bill.save()
    # invalidate the matching repayment log
    RepaymentLog.update(
        no_active=1
).where(RepaymentLog.partner_bill_id == partner_bill_id,
RepaymentLog.overdue_bill_id == overdue_bill.id).execute()
# legacy-data message handler
def bill_revoke_old(external_id):
application = (Application.select()
.where(Application.id == external_id)
.first())
if not application:
logging.info("not get application")
return
try:
bill = BillService().bill_dict(
application_id=external_id)
except Exception:
logging.error('application %s overdue, get bill info failed: '
'Request To Repayment Error', external_id)
return
    if bill.get('overdue_days') > 0 and bill.get('status') != 2:
q = (Application
.update(status=ApplicationStatus.UNCLAIMED.value,
repay_at=bill.get('repay_at'))
.where(Application.id == external_id).execute())
p = (OverdueBill.update(status=ApplicationStatus.UNCLAIMED.value)
.where(OverdueBill.collection_id == external_id).execute())
return
def check_key_not_none(payload, keys):
for key in keys:
if payload.get(key) is None:
logging.error('Missing args {}'.format(str(key)))
return False
return True
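# Usage sketch for check_key_not_none (illustrative payload):
#   check_key_not_none({'external_id': 1, 'paid_at': None},
#                      ['external_id', 'paid_at'])
#   logs "Missing args paid_at" and returns False, since None values count as
#   missing.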
# repayment
@action(MessageAction.BILL_PAID)
def bill_paid(payload, msg_id):
# Don't use validator, it will throw exception
validate = check_key_not_none(payload,
['external_id', 'late_fee_part',
'principal_part', 'paid_at','bill_sub_id',
'partner_bill_id'])
if not validate:
logging.error('payload key not fully pass in.')
return
external_id = payload['external_id']
late_fee_part = Decimal(payload['late_fee_part'])
principal_part = Decimal(payload['principal_part'])
paid_at = payload['paid_at']
partner_bill_id = payload['partner_bill_id']
logging.debug('application %s paid principal part %s, paid late fee '
'part %s', external_id, principal_part, late_fee_part)
application = (Application
.filter(Application.external_id == external_id)
.order_by(-Application.created_at)
.first())
if not application:
logging.info('application %s paid, not found application',external_id)
return
    # get the installment period info
sub_bill_id = payload['bill_sub_id']
overdue_bill = (OverdueBillR.select()
.where(OverdueBillR.collection_id == application.id,
OverdueBillR.sub_bill_id == sub_bill_id)
.first())
if (application.type == ApplicationType.CASH_LOAN_STAGING.value
and not overdue_bill):
logging.info("bill sub not in bomber %s",sub_bill_id)
return
with db.atomic():
repay_at = str_no_utc_datetime(payload['latest_repay_at'])
Application.update(
repay_at=repay_at
).where(Application.id == application.id).execute()
        # after the predictive dialer went live, every repayment counts as is_bombed = True
RepaymentLog.create(
application=application.id,
is_bombed=True,
current_bomber=application.latest_bomber_id,
cycle=application.cycle,
principal_part=principal_part,
late_fee_part=late_fee_part,
repay_at=paid_at,
ptp_bomber=application.ptp_bomber,
latest_call=application.latest_call,
periods=overdue_bill.periods if overdue_bill else None,
overdue_bill_id=overdue_bill.id if overdue_bill else None,
partner_bill_id=partner_bill_id
)
        # smart collection: re-rank the numbers used for collection calls
phone_status = PhoneStatus.CONNECTED.value
real_relationship = RealRelationship.user_values()
commit = CallActionCommit.NO.value
number = (CallActions.select(CallActions.number)
.where(CallActions.phone_status == phone_status,
CallActions.real_relationship << real_relationship,
CallActions.commit == commit,
CallActions.application == application.id)
.order_by(-CallActions.created_at)
.first())
if number:
(Contact.update(call_priority=PriorityStatus.REPAY.value)
.where(Contact.user_id == application.user_id,
Contact.call_priority == PriorityStatus.LAST.value)
).execute()
(Contact.update(call_priority=PriorityStatus.LAST.value)
.where(Contact.user_id == application.user_id,
Contact.number == number.number)
).execute()
if not application.latest_bomber_id:
return
Inbox.create(
title='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id),
content='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id),
receiver=(application.latest_bomber_id or
application.last_bomber_id),
category=InboxCategory.REPAID.value,
)
@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
"""已废弃"""
bill = payload['head_bill']
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_row = Application.update(
repay_at=repay_at,
).where(Application.id == bill['external_id']).execute()
logging.info('application %s bill relief done', bill['external_id'])
return updated_row
# repayment fully cleared
@action(MessageAction.BILL_CLEARED)
@action(MessageAction.BILL_CLEARED_BEFORE_CONFIRM)
def bill_cleared(payload, msg_id):
"""
BILL_CLEARED_BEFORE_CONFIRM仅在bomber系统中使用,MST清除账单时先修改其状态
为还款完成,让其不被催收
"""
external_id = payload.get('external_id')
sub_bill_id = payload.get('bill_sub_id')
if not external_id:
logging.warning('payload has no external_id. {}'.format(str(payload)))
return
    # once cleared, stop dialing the application via IVR
AutoIVR.update(
status=AutoIVRStatus.REPAID.value
).where(AutoIVR.application_id == external_id).execute()
try:
bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
bill = bill[0]
except Exception:
        logging.error('application %s get bill info failed: '
                      'Request To Repayment Error', external_id)
return
application = Application.filter(
Application.external_id == external_id,
Application.status << [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.BAD_DEBT.value,
ApplicationStatus.AB_TEST.value]
).first()
if not application:
logging.info('application %s repay clear, not found bomber record',
external_id)
return
with db.atomic():
        # update the status of the sub-bill cleared by this payment
        sub_bill_update = (OverdueBill.update(
            status=ApplicationStatus.REPAID.value,
            finished_at=datetime.now())
.where(OverdueBill.collection_id == application.id,
OverdueBill.sub_bill_id == sub_bill_id)
.execute())
        # for installment loans, check whether every sub-bill is repaid
overdue_bill = (OverdueBill.select()
.where(OverdueBill.collection_id == application.id,
OverdueBill.status != 2,
OverdueBill.sub_bill_id != sub_bill_id))
if overdue_bill.exists():
if application.latest_bomber_id:
Inbox.create(
title='application %s sub_bill_id %s cleared' % (
application.external_id, sub_bill_id),
content='application %s sub_bill_id %s cleared' % (
application.external_id, sub_bill_id),
receiver=application.latest_bomber_id,
category=InboxCategory.CLEARED.value,
)
return
        # sync the completed repayment to the outsourcing partner
partner = DispatchApp.filter(DispatchApp.application == application.id)
if partner.exists():
DispatchApp.update(
status=DisAppStatus.ABNORMAL.value
).where(DispatchApp.application == application.id).execute()
        # update the application status in the auto-dialer queue
AutoCallList.update(
status=AutoListStatus.REMOVED.value,
description='bill clear'
).where(AutoCallList.application == application.id).execute()
application.status = ApplicationStatus.REPAID.value
application.finished_at = datetime.now()
application.paid_at = datetime.now()
        # overdue_days <= 0 means the loan was never overdue and should not be in bomber
if int(bill.get("overdue_days")) <= 0:
application.no_active = 1
(RepaymentLog.update(no_active=1)
.where(RepaymentLog.application == application.id)
.execute())
application.save()
bomber_id = application.latest_bomber_id
        # after month-end clearing, C1A/C1B cases are checked in again and must
        # be checked out on payment; the cycle value (e.g. 2) doubles as the
        # default bomber_id
if (application.cycle in (Cycle.C1A.value,Cycle.C1B.value) and
not bomber_id):
bomber_id = application.cycle
if not bomber_id:
return
(DispatchAppHistory.update(
out_at=datetime.now()
).where(
DispatchAppHistory.application == application.id,
DispatchAppHistory.bomber_id == bomber_id)).execute()
if not application.latest_bomber_id:
return
item = (OldLoanApplication
.get_or_none(OldLoanApplication.status ==
OldLoanStatus.PROCESSING.value,
OldLoanApplication.application_id ==
application.id))
if item:
end_old_application(item, paid=True)
out_record(src_bomber_id=bomber_id,
application_ids=[item.application_id])
Inbox.create(
title='application %s cleared' % application.external_id,
content='application %s cleared' % application.external_id,
receiver=application.latest_bomber_id,
category=InboxCategory.CLEARED.value,
)
# sync bill2
@action(MessageAction.OVERDUE_BILL_SYNC)
def overdue_bill_sync(payload, msg_id):
"""已废弃"""
bill2_list = payload
updated_count = 0
with db.atomic():
for bill in bill2_list:
principal = Decimal(bill['principal'])
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_count += Application.update(
amount=principal,
repay_at=repay_at,
).where(Application.id == bill['external_id']).execute()
logging.info('overdue sync done, updated count: %s', updated_count)
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
    # update applications more than 95 days overdue
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
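    # this renders roughly as GREATEST(DATEDIFF(NOW(), origin_due_at), 0),
    # so a due date in the future clamps to 0 instead of going negative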
query = (Application
.update(overdue_days=overdue_days)
.where(Application.status <<
[ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days > 95,
Application.type == ApplicationType.CASH_LOAN.value))
updated_rows_count = query.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count)
try:
calc_overdue_days_over_instalment()
except Exception as e:
logging.error("calc_overdue_days_over_instalment_error: %s"%str(e))
    # after overdue_days is recalculated, trigger automatic escalation
apps = Application.filter(
Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days > 95,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(
MessageAction.BOMBER_AUTOMATIC_ESCALATION,
{'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
# recalculate overdue days for installment cases more than 95 days overdue
def calc_overdue_days_over_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value]
for status in sub_bill_status_list:
        # update overdue days
query = (OverdueBill.update(overdue_days=overdue_days)
.where(OverdueBill.status == status,
OverdueBill.overdue_days > 95))
updated_rows_count = query.execute()
logging.info("calc_overdue_days_over_instalment done,count:%s,status:%s" %
(updated_rows_count, status))
        # fetch all sub-bill info
overdue_bills = (OverdueBill
.select(OverdueBill.collection_id,
OverdueBill.overdue_days)
.join(Application, JOIN_LEFT_OUTER,
on=OverdueBill.collection_id == Application.id)
.where(Application.status == status,
(Application.type ==
ApplicationType.CASH_LOAN_STAGING.value)))
        # work out the overdue days to set on each installment collection case
app_update = {}
for ob in overdue_bills:
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
        # update overdue days on the collection applications
for aid, a_days in app_update.items():
q = (Application.update(overdue_days=a_days)
.where(Application.id == aid)
.execute())
logging.info("update instalment application done")
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)
def calc_overdue_days(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query_unclaimed = (Application
.update(overdue_days=overdue_days)
.where(Application.status ==
ApplicationStatus.UNCLAIMED.value,
Application.overdue_days <= 95,
(Application.type ==
ApplicationType.CASH_LOAN.value)))
updated_rows_count_unclaimed = query_unclaimed.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_unclaimed)
query_processing = (Application
.update(overdue_days=overdue_days)
.where(Application.status ==
ApplicationStatus.PROCESSING.value,
Application.overdue_days <= 95,
(Application.type ==
ApplicationType.CASH_LOAN.value)))
updated_rows_count_processing = query_processing.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_processing)
query_test = (Application
.update(overdue_days=overdue_days)
.where(Application.status ==
ApplicationStatus.AB_TEST.value,
Application.overdue_days <= 95,
(Application.type ==
ApplicationType.CASH_LOAN.value)))
updated_rows_count_test = query_test.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_test)
    # calculate overdue days for installment bills
calc_overdue_days_instalment()
    # after overdue_days is recalculated, trigger automatic escalation
apps = Application.select(Application.id).where(
Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days <= 95,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(
MessageAction.BOMBER_AUTOMATIC_ESCALATION,
{'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
    # once overdue_days is updated, set C1A_entry (cases 4 days overdue enter C1A)
Application.update(
C1A_entry=datetime.now()
).where(
Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days == 4
).execute()
# calculate overdue days for installment applications
def calc_overdue_days_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value]
    # timestamp for the first day of the current month
today_now_time = datetime.now()
month_first_day = today_now_time.replace(day=1,
hour=1,
minute=30,
second=0,
microsecond=0)
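    # e.g. a run on 2020-03-18 yields month_first_day == 2020-03-01 01:30:00
    # (the date is illustrative)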
for status in sub_bill_status_list:
        # update overdue days
        query = (OverdueBill.update(overdue_days=overdue_days)
.where(OverdueBill.status == status,
OverdueBill.overdue_days <= 95))
updated_rows_count = query.execute()
logging.info("calc_overdue_days_instalment done,count:%s,status:%s" %
(updated_rows_count, status))
        # fetch all sub-bill info
overdue_bills = (OverdueBill
.select(OverdueBill.status,
OverdueBill.created_at,
OverdueBill.collection_id,
OverdueBill.overdue_days)
.join(Application, JOIN_LEFT_OUTER,
on=OverdueBill.collection_id == Application.id)
.where(Application.status == status,
(Application.type ==
ApplicationType.CASH_LOAN_STAGING.value)))
        # work out the overdue days to set on each installment collection case
app_update = {}
for ob in overdue_bills:
            # skip periods repaid before the start of the current month
if (ob.status == ApplicationStatus.REPAID.value and
ob.created_at < month_first_day):
continue
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id],ob.overdue_days)
app_update[ob.collection_id] = ob_days
        # update overdue days on the collection applications
        for aid, a_days in app_update.items():
            q = (Application.update(overdue_days=a_days)
.where(Application.id == aid)
.execute())
logging.info("update instalment application done")
@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)
def automatic_escalation(payload, msg_id):
app_ids = payload.get('application_list', [])
if not app_ids:
return
    # filter out repaid applications
apps = (Application.select()
.where(Application.id.in_(app_ids),
Application.status != ApplicationStatus.REPAID.value))
for a in apps:
new_cycle = application_entry_different_calculations(a)
if a.overdue_days < 90:
logging.info(
"automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}".format(
a.id, new_cycle, a.cycle, a.overdue_days))
if new_cycle > a.cycle:
with db.atomic():
if (a.latest_bomber_id or
a.cycle in (Cycle.C1A.value, Cycle.C1B.value)):
bomber_id = (a.latest_bomber_id
if a.latest_bomber_id else a.cycle)
(DispatchAppHistory.update(
out_at=datetime.now(),
out_overdue_days=a.overdue_days,
).where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == bomber_id
)).execute()
Escalation.create(
application=a.id,
type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value,
current_cycle=a.cycle,
escalate_to=new_cycle,
current_bomber_id=a.latest_bomber,
)
                    # if the case is outsourced, mark it abnormal in dispatch_app on escalation
dis_app_update = (DispatchApp
.update(status = DisAppStatus.ABNORMAL.value)
.where(DispatchApp.application == a.id))
dis_app_update.execute()
a.cycle = new_cycle
a.last_bomber = a.latest_bomber
a.status = ApplicationStatus.UNCLAIMED.value
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
                    # reset the call counter after escalation
a.called_times = 0
if new_cycle == Cycle.C1B.value:
a.C1B_entry = datetime.now()
elif new_cycle == Cycle.C2.value:
a.C2_entry = datetime.now()
elif new_cycle == Cycle.C3.value:
a.C3_entry = datetime.now()
a.save()
logging.info('automatic escalation done')
# move the C1B entry point to day 10 for part of the cases
def application_entry_different_calculations(app):
conf = {
1: [1, 10],
2: [11, 30],
3: [31, 60],
4: [61, 90],
5: [91, 999999],
}
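    # e.g. overdue_days == 12 falls into [11, 30] and returns cycle 2 (C1B);
    # a value outside every bucket (such as 0) keeps the current cycle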
for new_cycle,scopes in conf.items():
if scopes[0] <= app.overdue_days <= scopes[1]:
return new_cycle
return app.cycle
@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
"""已废弃"""
employees = Bomber.select(Bomber, Role).join(Role)
summary = {
i.id: {
'cycle': i.role.cycle,
'claimed': 0,
'completed': 0,
'cleared': 0,
'escalated': 0,
'transferred': 0,
'promised': 0,
'amount_recovered': Decimal(0),
'calls_made': 0,
'calls_connected': 0,
'sms_sent': 0,
}
for i in employees
}
    # runs daily at 02:15 to summarize the previous day
now_date = date.today()
cal_date = now_date - timedelta(days=1)
    # number of applications claimed that day
claimed = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('claimed'))
.where(fn.DATE(Application.claimed_at) == cal_date,
Application.status <<
[ApplicationStatus.PROCESSING.value,
ApplicationStatus.REPAID.value],
Application.latest_bomber.is_null(False))
.group_by(Application.latest_bomber))
    # number of applications repaid that day
cleared = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('cleared'))
.where(fn.DATE(Application.finished_at) == cal_date,
Application.status == ApplicationStatus.REPAID.value,
Application.latest_bomber.is_null(False))
.group_by(Application.latest_bomber))
    # number of applications followed up (bombed) that day
completed = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('completed'))
.where(Application.latest_bombing_time.is_null(False),
fn.DATE(Application.latest_bombing_time) == cal_date,
Application.latest_bomber.is_null(False))
.group_by(Application.latest_bomber))
    # number of applications escalated into the next cycle
escalated = (Escalation
.select(Escalation.current_bomber,
fn.COUNT(Escalation.id).alias('escalated'))
.where(fn.DATE(Escalation.created_at) == cal_date,
Escalation.type == EscalationType.AUTOMATIC.value,
Escalation.current_bomber.is_null(False),
Escalation.status == ApprovalStatus.APPROVED.value)
.group_by(Escalation.current_bomber))
    # number of applications transferred away from each operator that day
transferred = (Transfer
.select(Transfer.operator,
fn.COUNT(Transfer.id).alias('transferred'))
.where(fn.DATE(Transfer.reviewed_at) == cal_date,
Transfer.status == ApprovalStatus.APPROVED.value)
.group_by(Transfer.operator))
    # PTP cases that made progress that day
promised = (
BombingHistory
.select(BombingHistory.bomber,
fn.COUNT(BombingHistory.id).alias('promised'))
.where(fn.DATE(BombingHistory.created_at) == cal_date,
BombingHistory.result == BombingResult.HAS_PROGRESS.value)
.group_by(BombingHistory.bomber)
)
    # amount recovered that day
amount_recovered = (RepaymentLog
.select(RepaymentLog.current_bomber,
fn.SUM(RepaymentLog.principal_part)
.alias('principal_part'),
fn.SUM(RepaymentLog.late_fee_part)
.alias('late_fee_part'))
.where(fn.DATE(RepaymentLog.repay_at) == cal_date,
RepaymentLog.is_bombed == True,
RepaymentLog.current_bomber.is_null(False))
.group_by(RepaymentLog.current_bomber))
    # the CallLog table is deprecated
calls_made = (CallLog
.select(CallLog.user_id,
fn.COUNT(CallLog.record_id).alias('calls_made'))
.where(fn.DATE(CallLog.time_start) == cal_date,
CallLog.system_type == '1')
.group_by(CallLog.user_id))
    # the CallLog table is deprecated
calls_connected = (CallLog
.select(CallLog.user_id,
fn.COUNT(CallLog.record_id)
.alias('calls_connected'))
.where(fn.DATE(CallLog.time_start) == cal_date,
CallLog.duration > 10,
CallLog.system_type == '1').
group_by(CallLog.user_id))
    # all SMS sent that day
sms_sent = (ConnectHistory
.select(ConnectHistory.operator,
fn.COUNT(ConnectHistory.id).alias('sms_sent'))
.where(ConnectHistory.type.in_(ConnectType.sms()),
ConnectHistory.created_at >= cal_date,
ConnectHistory.created_at < now_date
)
.group_by(ConnectHistory.operator))
for i in claimed:
summary[i.latest_bomber_id]['claimed'] += i.claimed
for i in completed:
summary[i.latest_bomber_id]['completed'] += i.completed
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
for i in escalated:
summary[i.current_bomber_id]['escalated'] += i.escalated
for i in transferred:
summary[i.operator_id]['transferred'] += i.transferred
for i in promised:
summary[i.bomber_id]['promised'] += i.promised
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in calls_made:
summary[int(i.user_id)]['calls_made'] += i.calls_made
for i in calls_connected:
summary[int(i.user_id)]['calls_connected'] += i.calls_connected
for i in sms_sent:
summary[i.operator_id]['sms_sent'] += i.sms_sent
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({
'bomber': bomber_id,
'cycle': data['cycle'],
'claimed': data['claimed'],
'completed': data['completed'],
'cleared': data['cleared'],
'escalated': data['escalated'],
'transferred': data['transferred'],
'promised': data['promised'],
'amount_recovered': data['amount_recovered'],
'calls_made': data['calls_made'],
'calls_connected': data['calls_connected'],
'sms_sent': data['sms_sent'],
'date': cal_date,
})
if insert_args:
Summary.insert_many(insert_args).execute()
cycle_args = []
    # calculate new inflow, aggregated by cycle
escalated_in = (Escalation
.select(Escalation.escalate_to,
fn.COUNT(Escalation.id).alias('escalated_in'))
.where(Escalation.status == ApprovalStatus.APPROVED.value,
fn.DATE(Escalation.created_at) == cal_date)
.group_by(Escalation.escalate_to))
for i in escalated_in:
cycle_args.append({
'cycle': i.escalate_to,
'escalated_in': i.escalated_in,
'date': cal_date,
})
amount_recovered_total = (
RepaymentLog
.select(RepaymentLog.cycle,
fn.SUM(RepaymentLog.principal_part).alias('principal_part'),
fn.SUM(RepaymentLog.late_fee_part).alias('late_fee_part'))
.where(fn.DATE(RepaymentLog.repay_at) == cal_date)
.group_by(RepaymentLog.cycle)
)
for i in amount_recovered_total:
amount_recovered_total = i.principal_part + i.late_fee_part
cycle_args.append({
'cycle': i.cycle,
'amount_recovered_total': amount_recovered_total,
'date': cal_date,
})
if cycle_args:
Summary.insert_many(cycle_args).execute()
logging.info('cal summary done')
    # after the summary run, recalculate overdue days to trigger auto escalation
send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})
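# Note on the chain above: BOMBER_CALC_OVERDUE_DAYS recalculates overdue days
# and then fans out BOMBER_AUTOMATIC_ESCALATION in batches of 100 application
# ids, so escalation always runs on fresh numbers.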
@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
"""已废弃,定时任务还在执行,具体情况待确定"""
cal_date = date.today() - timedelta(days=1)
employees = Bomber.select(Bomber, Role).join(Role)
auto_call_actions = (
AutoCallActions
.select(
AutoCallActions.bomber,
AutoCallActions.result,
fn.COUNT(AutoCallActions.id).alias('count')
)
.where(fn.DATE(AutoCallActions.created_at) == cal_date)
)
amount_recovered = (RepaymentLog
.select(RepaymentLog.current_bomber,
fn.SUM(RepaymentLog.principal_part)
.alias('principal_part'),
fn.SUM(RepaymentLog.late_fee_part)
.alias('late_fee_part'))
.where(fn.DATE(RepaymentLog.repay_at) == cal_date,
RepaymentLog.current_bomber.is_null(False),
RepaymentLog.is_bombed == True))
cleared = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('cleared'))
.where(fn.DATE(Application.finished_at) == cal_date,
Application.status == ApplicationStatus.REPAID.value,
Application.latest_bomber.is_null(False)))
auto_call_actions = auto_call_actions.group_by(
AutoCallActions.bomber, AutoCallActions.result
)
amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)
cleared = cleared.group_by(Application.latest_bomber)
summary = {
e.id: {
'cycle': e.role.cycle,
'answered_calls': 0,
'ptp': 0,
'follow_up': 0,
'not_useful': 0,
'cleared': 0,
'amount_recovered': 0,
}
for e in employees
}
for a in auto_call_actions:
summary[a.bomber_id]['answered_calls'] += a.count
if a.result == AutoCallResult.PTP.value:
summary[a.bomber_id]['ptp'] += a.count
if a.result == AutoCallResult.FOLLOW_UP.value:
summary[a.bomber_id]['follow_up'] += a.count
if a.result == AutoCallResult.NOT_USEFUL.value:
summary[a.bomber_id]['not_useful'] += a.count
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({
'bomber': bomber_id,
'cycle': data['cycle'],
'answered_calls': data['answered_calls'],
'ptp': data['ptp'],
'follow_up': data['follow_up'],
'not_useful': data['not_useful'],
'cleared': data['cleared'],
'amount_recovered': str(data['amount_recovered']),
'date': cal_date,
})
if insert_args:
Summary2.insert_many(insert_args).execute()
@action(MessageAction.BOMBER_SYNC_CONTACTS)
def sync_suggested_contacts(payload, msg_id):
""" suggested contacts sync """
applications = (Application
.select(Application.id, Application.user_id)
.where(Application.status <<
[ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value]))
logging.debug('start sync contact')
for a in applications:
sync_contacts(a)
logging.info('contact sync finished')
def sync_contacts(application):
logging.info('application %s start sync contact', application.id)
    # add contact info
contacts = Contact.filter(Contact.user_id == application.user_id)
existing_numbers = {contact.number for contact in contacts}
# sms contacts
insert_contacts = []
sms_contacts = GoldenEye().get(
'/applications/%s/sms-contacts' % application.external_id
)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id)
else:
sms_contacts = sms_contacts.json()['data']
for i in sms_contacts:
if i['number'] in existing_numbers:
continue
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'],
'number': i['number'],
'relationship': Relationship.SUGGESTED.value,
'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value
})
existing_numbers.add(i['number'])
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
# call frequency
insert_contacts = []
cf = GoldenEye().get(
'/applications/%s/call/frequency' % application.external_id
)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
with db.atomic():
for i in call_frequency:
if i['number'] in existing_numbers:
                (Contact
                 .update(total_count=i['total_count'],
                         total_duration=i['total_duration'])
                 .where(Contact.number == i['number'],
                        Contact.user_id == application.user_id)
                 .execute())
continue
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'],
'number': i['number'],
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'],
'total_duration': i['total_duration'],
'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value
})
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
@action(MessageAction.BOMBER_AUTO_SMS)
@deprecated(version='1.0', reason='This function will be removed soon')
def bomber_auto_sms(payload, msg_id):
day_diff = int(payload['day_diff'])
custom_type = payload.get('custom_type')
msg_type = payload['msg_type']
logging.info('auto sms %s sending', msg_type)
applications = (
Application
.select()
.where(Application.overdue_days == day_diff,
Application.status << [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value],
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date()))
)
if custom_type == 'new':
applications = applications.where(Application.loan_success_times < 3)
if custom_type == 'old':
applications = applications.where(Application.loan_success_times >= 3)
templates = (
Template.select(Template.text, Template.app)
.where(Template.type == ConnectType.AUTO_SMS.value,
Template.id << Template.get_auto_sms_tpl(msg_type))
)
tpl_text = dict()
for tpl in templates:
tpl_text[tpl.app] = tpl.text
data_list = []
for a in applications:
tpl_data = {
'user_name': a.user_name,
'due_days': a.overdue_days,
'app_name': a.app,
'phone': a.user_mobile_no,
'cs_number': cs_number_conf.get(a.app, '02150202889'),
}
content = tpl_text[a.app].format(**tpl_data)
data_list.append({
'phone': '62' + a.user_mobile_no,
'content': content,
'app': a.app,
})
if not data_list:
logging.info('auto sms %s do not need sending', msg_type)
return
send_sms(data_list, msg_type, SmsChannel.NUSA.value)
@action(MessageAction.BOMBER_AUTO_MESSAGE_DAILY)
def bomber_auto_message_daily(payload, msg_id):
app_dict = dict(zip(AppName.keys(), AppName.values()))
    # applications already reached by auto-call today
auto_call_list = AutoCallActionsR \
.select(AutoCallActionsR.application_id) \
.where(fn.DATE(AutoCallActionsR.created_at) == fn.CURDATE())
applications = (
ApplicationR
.select()
.where(ApplicationR.overdue_days < 30,
ApplicationR.overdue_days > 4,
ApplicationR.type == ApplicationType.CASH_LOAN.value,
ApplicationR.status << [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value],
ApplicationR.promised_date.is_null(True) |
(fn.DATE(ApplicationR.promised_date) < datetime.today().date()),
~(ApplicationR.id << auto_call_list))
)
    stage_list1 = range(*AutoCallMessageCycle.NEW_STAGE1.value['scope'], 3)  # 5, 8, 11, 14
    stage_list2 = range(*AutoCallMessageCycle.STAGE2.value['scope'], 3)  # 15, 18
    stage_list3 = range(*AutoCallMessageCycle.STAGE3.value['scope'], 3)
sms_list = defaultdict(list)
fcm_list = defaultdict(list)
for a in applications:
overdue_type = ''
if a.overdue_days in stage_list1:
if a.loan_success_times < 3:
overdue_type = AutoCallMessageCycle.NEW_STAGE1.value['type']
else:
overdue_type = AutoCallMessageCycle.OLD_STAGE1.value['type']
if a.overdue_days in stage_list2:
overdue_type = AutoCallMessageCycle.STAGE2.value['type']
if a.overdue_days in stage_list3:
overdue_type = AutoCallMessageCycle.STAGE3.value['type']
if overdue_type == '':
continue
# format app name
app_name = app_dict.get(a.app.upper(), AppName.default().value)
try:
tpl_id = Template.get_daily_auto_sms_tpl(overdue_type, app_name)
except KeyError:
logging.warning('Key error {}, id is {}'.format(
(overdue_type, app_name), a.id))
continue
data_map = {
'user_name': a.user_name,
'app_name': app_name,
'overdue_days': a.overdue_days,
'cs_number': cs_number_conf.get(a.app, '')
}
sms_list[(overdue_type, tpl_id, a.app)].append({
'receiver': '62' + a.user_mobile_no,
'data_map': data_map
})
fcm_list[(overdue_type, tpl_id, a.app)].append({
'receiver': a.user_id,
'data_map': data_map
})
for (msg_type, tpl_id, app_name), data_list in sms_list.items():
auto_send_sms_and_fcm(data_list, tpl_id, app_name, "SMS")
    for (msg_type, tpl_id, app_name), data_list in fcm_list.items():
        auto_send_sms_and_fcm(data_list, tpl_id, app_name, "FCM")
# overdue SMS for installment loans
@action(MessageAction.BOMBER_INSTALMENT_AUTO_MESSAGE_DAILY)
def bomber_instalment_auto_message_daily(payload, msg_id):
applications = (ApplicationR.select(ApplicationR.id,
ApplicationR.app,
ApplicationR.user_id,
ApplicationR.user_name,
ApplicationR.user_mobile_no,
ApplicationR.loan_success_times,
OverdueBillR.status,
OverdueBillR.sub_bill_id,
OverdueBillR.overdue_days, )
.join(OverdueBillR, JOIN_LEFT_OUTER,
on=ApplicationR.id == OverdueBillR.collection_id)
.where(ApplicationR.type ==
ApplicationType.CASH_LOAN_STAGING.value,
ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.overdue_days < 90,
ApplicationR.promised_date.is_null(True) |
(fn.DATE(
ApplicationR.promised_date) < datetime.today().date()),
)
.dicts())
    # work out the real overdue days and outstanding amounts
app_overdues = {}
for app in applications:
if app["status"] == ApplicationStatus.REPAID.value:
continue
if app["id"] in app_overdues:
overdue_days = app_overdues[app["id"]]["overdue_days"]
app_overdues[app["id"]]["overdue_days"] = max(app["overdue_days"],
overdue_days)
app_overdues[app["id"]]["bill_sub_ids"].append(app["sub_bill_id"])
else:
app_overdues[app["id"]] = {
"app_name": app["app"],
"user_id": app["user_id"],
"user_name": app["user_name"],
"overdue_days": app["overdue_days"],
"bill_sub_ids": [app["sub_bill_id"]],
"phone": '62' + app["user_mobile_no"],
"loan_success_times": app["loan_success_times"],
"cs_number": cs_number_conf.get(app["app"], '02150202889')
}
    # pick the cases that need SMS and compute their unpaid amounts
sms_dict = {}
sub_bill_ids = []
send_message = defaultdict(list)
send_fcm = defaultdict(list)
for aid, app in app_overdues.items():
message_id = Template.get_daily_instalment_auto_sms_tpl(
overdue_days=app["overdue_days"],
loan_times=app["loan_success_times"]
)
if message_id:
app["tpl_id"] = message_id
sms_dict[aid] = app
sub_bill_ids.extend(app["bill_sub_ids"])
if not sms_dict:
logging.info("no application need send sms")
return
sub_bills = []
try:
        for index in range(0, len(sub_bill_ids), 30):
            sub_bill = BillService().sub_bill_list(
                bill_sub_ids=sub_bill_ids[index:index + 30])
sub_bills += sub_bill
except Exception as e:
logging.info("send sms get bill error:%s" % str(e))
return
sub_bills_dict = {int(sb["id"]): sb for sb in sub_bills}
for aid, app in sms_dict.items():
amount = 0
for sbid in app["bill_sub_ids"]:
amount += sub_bills_dict.get(sbid, {}).get("unpaid", 0)
data_map = {
"user_name": app["user_name"],
"app_name": app["app_name"],
"overdue_days": app["overdue_days"],
"cs_number": app["cs_number"],
"amount": str(amount)
}
send_message[(app['tpl_id'], app["app_name"])].append({
"receiver": app["phone"],
"data_map": data_map
})
send_fcm[(app['tpl_id'], app["app_name"])].append({
"receiver": app["user_id"],
"data_map": data_map
})
for (tpl_id, app_name), data_list in send_message.items():
auto_send_sms_and_fcm(data_list, tpl_id, app_name, "SMS")
    for (tpl_id, app_name), data_list in send_fcm.items():
        auto_send_sms_and_fcm(data_list, tpl_id, app_name, "FCM")
def auto_send_sms_and_fcm(data_list, tpl_id, app_name, message_type):
if not data_list:
return
    # 200 records per request
for idx in range(0, len(data_list), 200):
request_json = {
"app_name": app_name,
"failed_retry": True,
"is_masking": True,
"list": data_list[idx: idx+200],
"message_level": 1,
"message_type": message_type,
"sms_type": 4 if message_type == "SMS" else 0,
"type_id": tpl_id
}
        try:
            result = MessageService().send_batch_template(**request_json)
            if not result.get("result"):
                logging.error("send_batch_template failed:%s,req:%s,res:%s",
                              message_type, request_json, result)
        except Exception as e:
            logging.error("send_batch_template error:%s,req:%s,error:%s",
                          message_type, request_json, str(e))
            return
    logging.info("send_batch_template success:%s,app_name:%s,tpl_id:%s",
                 message_type, app_name, tpl_id)
def get_danamall_msg_service(app_name, message_service):
if app_name == AppName.DANAMALL.value:
# token = app.config['service.message.%s.token' % app_name.lower()]
message_service = Message(version=app_name)
return message_service
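# Usage sketch (illustrative): only the Danamall app swaps in a dedicated
# Message client; every other app keeps the service passed in:
#   svc = get_danamall_msg_service(app_name, MessageService())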
# collectors send SMS reminding customers of the promised repayment date
@action(MessageAction.BOMBER_REMIND_PROMISE)
def bomber_remind_promise(payload, msg_id):
day_diff = int(payload['day_diff'])
msg_type = payload['msg_type']
logging.info('auto sms %s sending', msg_type)
applications = (
Application
.select()
.where(
fn.DATEDIFF(fn.NOW(), Application.promised_date) == day_diff,
Application.status << [
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
]
)
)
templates = (
Template
.select(Template.text, Template.app)
.where(Template.type == ConnectType.AUTO_SMS.value,
Template.id << Template.get_auto_sms_tpl(msg_type))
)
tpl_text = {tpl.app: tpl.text for tpl in templates}
message_date_dict = defaultdict(list)
for a in applications:
tpl_data = {
'user_name': a.user_name,
'due_days': a.overdue_days,
'app_name': a.app,
'phone': a.user_mobile_no,
'cs_number': cs_number_conf.get(a.app, '02150202889'),
'promised_date': a.promised_date.strftime('%d-%m-%Y'),
}
content = tpl_text[a.app].format(**tpl_data)
message_date_dict[a.app].append(
{
"content": content,
"receiver": '62' + a.user_mobile_no,
"title": ""
}
)
for app_name, data_list in message_date_dict.items():
send_sms(data_list, msg_type, app_name)
@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
app_id = payload['id']
msg_type = payload['msg_type']
discount_to = payload['discount_to']
effective_to = payload['effective_to']
application = Application.filter(Application.id == app_id).first()
if not application:
logging.error('discount approved msg send failed '
'application %s not found', app_id)
return
template = (
Template
.select(Template.text, Template.app)
.where(Template.type == ConnectType.AUTO_SMS.value,
Template.id << Template.get_auto_sms_tpl(msg_type),
Template.app == application.app)
.first()
)
if not template:
logging.error('discount approved msg send failed '
'template %s not found', msg_type)
return
promised_date = None
if application.promised_date:
promised_date = application.promised_date.strftime('%d-%m-%Y')
tpl_data = {
'user_name': application.user_name,
'due_days': application.overdue_days,
'app_name': application.app,
'phone': application.user_mobile_no,
'cs_number': cs_number_conf.get(application.app, '02150202889'),
'promised_date': promised_date,
'discount_to': discount_to,
'effective_to': effective_to,
}
content = template.text.format(**tpl_data)
data_list = [{
'receiver': '62' + application.user_mobile_no,
'content': content,
'title': "",
}]
send_sms(data_list, msg_type, application.app)
# batch-send custom SMS
def send_sms(data_list, msg_type, app_name):
if not data_list:
return
for index in range(0, len(data_list), 200):
req_data = {
"app_name": app_name,
"failed_retry": True,
"is_masking": True,
"list": data_list[index: index+200],
"message_level": 0,
"message_type": "SMS",
"sms_type": 3
}
try:
result = MessageService().send_batch(**req_data)
if not result.get("result"):
                logging.error(
                    "send_sms_failed:%s,req:%s,res:%s",
                    msg_type, req_data, result)
        except Exception as e:
            # `result` is not referenced here because send_batch may have
            # raised before it was assigned
            logging.error(
                "send_sms_error:%s,req:%s,error:%s" % (
                    msg_type, req_data, str(e)))
return
logging.info("send_sms_success:%s", msg_type)
# build the auto-call list and dispatch cases
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):
with db.atomic():
        # dispatch single-period cases: each cycle's share goes to outsourcing
        # first, the rest to designated in-house bombers
        # outsourcing stages are distinguished by partner, and outsourced
        # accounts are identified via the bomber's partner_id
bomber_dispatch_app()
        # dispatch installment cases; the installment field identifies the
        # bombers for each cycle
dispatch_instalment_app()
    # dispatch records
dis_apps = (DispatchApp
.select(DispatchApp.application)
.where(DispatchApp.status == DisAppStatus.NORMAL.value))
c1_apps = (
Application
.select(Application.id,
Application.cycle,
Application.follow_up_date,
Application.called_times)
.where(
Application.status.not_in([ApplicationStatus.REPAID.value,
ApplicationStatus.AB_TEST.value]),
Application.cycle == Cycle.C1A.value,
Application.is_rejected == False, # noqa
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date())
).order_by(Application.overdue_days, Application.apply_at)
)
dis_apps_ids = [da.application_id for da in dis_apps]
insert_args = []
for a in c1_apps:
if a.id in dis_apps_ids:
continue
insert_args.append({
'application': a.id,
'cycle': a.cycle,
'follow_up_date': a.follow_up_date,
'called_times': 1 if a.called_times else 0,
'description': 'init'
})
    if not insert_args:
        logging.error('no application needs auto call')
    # scan the application table and insert the rows into auto_call_list
with db.atomic():
AutoCallList.delete().execute()
for idx in range(0, len(insert_args), 100):
AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()
for idx in range(0, len(insert_args), 100):
application_list = [
i['application']
for i in insert_args[idx:idx + 100]
]
        # fetch the validated, usable phone numbers
send_to_default_q(
MessageAction.BOMBER_AUTO_CALL_CONTACT,
{'application_list': application_list}
)
logging.info('bomber generate auto call list finished')
    # redistribute cases without a PTP at specific overdue days, i.e.
    # re-dispatch the long-stagnant backlog
send_to_default_q(
MessageAction.UPDATE_BOMBER_FOR_SPECIAL,
{})
class ChangeBomberTool(object):
@staticmethod
def in_record(bomber_id, ids, bd):
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'),
R('null').alias('partner_id'),
SQL('DATE_ADD(CURDATE(),INTERVAL 14 DAY)')
.alias('expected_out_time'),
Application.overdue_days.alias(
'entry_overdue_days'))
.where(Application.status !=
ApplicationStatus.REPAID.value,
Application.id << ids))
(Application
.update(latest_bomber=bomber_id)
.where(Application.id.in_(ids))
.execute())
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bd),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@staticmethod
def out_record(a, bd):
_id = str(a.id)
(DispatchAppHistory.update(
out_at=datetime.now(),
out_overdue_days=a.overdue_days,
out_principal_pending=(
a.amount -
Decimal(bd[_id].get('principal_paid'))),
out_late_fee_pending=(
bd[_id].get('late_fee') -
bd[_id].get('late_fee_paid')),
)
.where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == a.latest_bomber_id
)).execute()
a.last_bomber = a.latest_bomber
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
a.save()
@staticmethod
def classify(l, b):
if len(l) == 1:
return l[0]
_l = filter(lambda x: x['bomber'] != b, l)
return min(_l, key=lambda x: len(x['ids']))
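    # Worked example for classify, with made-up ids: given
    #   [{'bomber': 1, 'ids': [7, 8]}, {'bomber': 2, 'ids': []}]
    # and b=1, the entry for bomber 1 is filtered out and bomber 2 wins as the
    # least-loaded remaining candidate.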
@action(MessageAction.UPDATE_BOMBER_FOR_SPECIAL)
def update_bomber_for_special(payload, msg_id):
"""
cycle 1b 每天将DPD21且没有处于下P状态的件,分配给另一个催收员
cycle 2 每天将DPD46且没有处于下P状态的件,分配给另一个催收员
cycle 3 每天将dpd76且没有处于下p状态的件,分配给另一个催收员
:param payload:
:param msg_id:
:return:
"""
filter_list = {Cycle.C1B.value: {"overdue_days": 21, "role_id": 5},
Cycle.C2.value: {"overdue_days": 46, "role_id": 6},
Cycle.C3.value: {"overdue_days": 76, "role_id": 8}}
cbt = ChangeBomberTool()
for cycle, values in filter_list.items():
overdue_days = values["overdue_days"]
bombers = (Bomber.select()
.where(Bomber.role == values["role_id"],
Bomber.instalment == 0,
Bomber.is_del == 0))
bids = {b.id:b for b in bombers}
apps = (Application.select()
.where(Application.cycle == cycle,
Application.type == ApplicationType.CASH_LOAN.value,
Application.overdue_days == overdue_days,
Application.status == ApplicationStatus.AB_TEST.value,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < date.today()),
Application.latest_bomber_id.in_(list(bids.keys()))))
classify_dict = defaultdict(list)
for b in bombers:
classify_dict[b.group_id].append({"bomber": b.id, "ids": []})
with db.atomic():
app_ids = [i.id for i in apps]
if app_ids and bids:
bills = BillService().bill_list(application_ids=app_ids)
bill_dict = {str(bill['application_id']): bill for bill in
bills}
for i in apps:
current_bomber = bids.get(i.latest_bomber_id)
if not current_bomber:
continue
classify_list = classify_dict.get(current_bomber.group_id)
d = cbt.classify(classify_list, i.latest_bomber_id)
d["ids"].append(i.id)
cbt.out_record(i, bill_dict)
for group_id, cl_list in classify_dict.items():
for item in cl_list:
cbt.in_record(item["bomber"], item["ids"], bill_dict)
else:
logging.info(
"cycle:{} empty application list {} or bomber list {}".format(
cycle, app_ids, list(bids.keys())))
try:
update_bomber_for_special_instalment()
except Exception as e:
logging.error("special_instalment_error:%s"%str(e))
# special dispatch for installment cases (C1B/C2/C3)
def update_bomber_for_special_instalment():
filter_list = {Cycle.C1B.value: 21, Cycle.C2.value: 46, Cycle.C3.value: 76}
    for cycle, overdue_days in filter_list.items():
        # get the bombers dedicated to this installment cycle
bombers = (Bomber.select().where(Bomber.instalment == cycle,
Bomber.is_del == 0))
bids = {b.id:b for b in bombers}
        # fetch the collection applications
apps = (Application.select()
.where(Application.cycle == cycle,
Application.status == ApplicationStatus.AB_TEST.value,
Application.type ==
ApplicationType.CASH_LOAN_STAGING.value,
Application.overdue_days == overdue_days,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < date.today()),
Application.latest_bomber_id.in_(list(bids.keys()))))
classify_dict = defaultdict(list)
for b in bombers:
classify_dict[b.group_id].append({"bomber":b.id, "ids":[]})
for a in apps:
current_bomber = bids.get(a.latest_bomber_id)
if not current_bomber:
continue
classify_list = classify_dict.get(current_bomber.group_id)
d = ChangeBomberTool.classify(classify_list, a.latest_bomber_id)
d["ids"].append(a.id)
with db.atomic():
for group_id,classify_list in classify_dict.items():
for cl in classify_list:
aids = cl["ids"]
if not aids:
continue
latest_bomber_id = cl["bomber"]
q = (Application.update(latest_bomber = latest_bomber_id,
last_bomber = Application.latest_bomber)
.where(Application.id << aids)
.execute())
record_param = {
"cycle": cycle,
"application_ids": aids,
"dest_bomber_id": latest_bomber_id,
}
out_and_in_record_instalment(**record_param)
def bomber_dispatch_app():
# 将单期件c1a分件给外包,外包需设置,partner
try:
c1a_dispatch_app()
except Exception as e:
logging.error("c1a_dispatch_app error:%s"%str(e))
cycle = {
1: 10,
2: 30,
3: 60,
4: 90
}
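    # `cycle` maps a cycle number to the overdue-day count at which a case is
    # expected to leave that cycle; expected_out_time below is computed as
    # today + (cycle days - current overdue_days).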
    # single-period outsourcing: Cycle.C2, overdue_days 31
apps = (Application.select()
.where(fn.DATE(Application.C2_entry) == date.today(),
Application.type == ApplicationType.CASH_LOAN.value))
partners = (Partner.select()
.where(Partner.status == PartnerStatus.NORMAL.value,
Partner.cycle == Cycle.C2.value))
apps_ids = [a.id for a in apps]
dispatch_inserts = []
start_index = 0
apps_length = len(apps_ids)
logging.warning('apps length %s' % str(apps_length))
    for p in partners:  # currently there is only one partner
bombers = (Bomber.select()
.where(Bomber.partner == p.id,
Bomber.status != BomberStatus.OUTER_LEADER.value,
Bomber.is_del == 0))
gen = CycleIter([b.id for b in bombers])
existing_list = []
end_index = start_index + int(apps_length * p.app_percentage)
logging.info('partner length %s' % str(end_index))
if not apps_ids[start_index:end_index]:
continue
bills = BillService().bill_list(
application_ids=apps_ids[start_index:end_index])
bill_dict = {bill['application_id']: bill for bill in bills}
for a_id in apps_ids[start_index:end_index]:
bomber = average_gen(gen, existing_list)
q = (DispatchApp.delete()
.where(DispatchApp.application == a_id)
.execute())
dispatch_inserts.append({
'application': a_id,
'bomber': bomber,
'partner': p.id,
})
            # after dispatching to outsourcing, snapshot the case for data analysis
application = (Application.select()
.where(Application.id == a_id)).first()
application.latest_bomber = bomber
application.status = ApplicationStatus.AB_TEST.value
application.ptp_bomber = None
application.save()
day_next_cycle = (cycle.get(application.cycle) -
application.overdue_days)
DispatchAppHistory.create(
application=a_id,
partner_id=p.id,
bomber_id=bomber,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(
application.amount -
Decimal(bill_dict[a_id].get('principal_paid'))),
entry_late_fee_pending=(
Decimal(bill_dict[a_id].get('late_fee')) -
Decimal(bill_dict[a_id].get('late_fee_paid'))),
expected_out_time=(date.today() +
timedelta(days=day_next_cycle))
)
start_index = end_index
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
    # AB-test dispatch (manually maintained cases)
config = SystemConfig.prefetch(SCI.AB_TEST_C2)
c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)
    # remaining single-period cases go to the designated in-house collector ids [76, 100, 106, 107, 213, 215, 216, 221, 222, 223, 226, 235]
c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)
    # application ids from the python-side database
c2 = apps_ids[start_index:]
if c2:
bills = BillService().bill_list(
application_ids=c2)
else:
bills = []
    # bills from the java-side database
bill_dict = {bill['application_id']: bill for bill in bills}
logging.info('c2 AB_test length: %s' % str(c2))
gen = CycleIter(c2_bomber)
existing_list = []
for c in c2:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == c).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = 46 - application.overdue_days
DispatchAppHistory.create(
application=c,
bomber_id=bomber,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(application.amount
- bill_dict[c].get('principal_paid', 0)),
entry_late_fee_pending=(
bill_dict[c].get('late_fee', 0) -
bill_dict[c].get('late_fee_paid', 0)),
expected_out_time=(date.today() + timedelta(days=day_next_cycle))
)
ab_test_other()
# Part of the single-period cases go to outsourcing; in-house C1A cases are not
# dispatched and instead enter auto outbound calling
def c1a_dispatch_app():
today = datetime.today().date()
tomorrow = today + timedelta(days=1)
    # fetch the single-period cases
c1a_apps = (Application.select()
.where(Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value],
Application.dpd1_entry >= today,
Application.dpd1_entry < tomorrow,
Application.type == ApplicationType.CASH_LOAN.value))
all_aids = [a.id for a in c1a_apps]
    # fetch the outsourcing partners
partners = (Partner.select()
.where(Partner.status == PartnerStatus.NORMAL.value,
Partner.cycle == Cycle.C1A.value))
end = 0
for p in partners:
        # fetch bombers directly via the partner
bombers = (Bomber.select()
.where(Bomber.partner == p.id,
Bomber.is_del == 0))
start = end
end += int(len(all_aids) * p.app_percentage)
aids = all_aids[start:end]
bids = [b.id for b in bombers]
if not bids or not aids:
continue
        # number of cases each outsourced collector should receive
average_number = get_average_number(len(aids),len(bids))
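        # get_average_number is assumed (defined elsewhere in this module) to
        # split a total into near-even integer shares, e.g.
        # get_average_number(10, 3) -> something like [4, 3, 3].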
p_end = 0
for i,bid in enumerate(bids):
p_start = p_end
p_end += average_number[i]
b_aids = aids[p_start:p_end]
with db.atomic():
q = (Application
.update(latest_bomber = bid,
status = ApplicationStatus.AB_TEST.value)
.where(Application.id << b_aids)
.execute())
params = {
"cycle": Cycle.C1A.value,
"dest_partner_id": p.id,
"application_ids": b_aids,
"dest_bomber_id": bid
}
new_in_record(**params)
try:
dispatch_inserts = []
for aid in b_aids:
dispatch_inserts.append({'application': aid,
'bomber': bid,
'partner': p.id,
'status': DisAppStatus.NORMAL.value})
if dispatch_inserts:
q = (DispatchApp.insert_many(dispatch_inserts).execute())
except Exception as e:
logging.error("c1a分件写入dispatch_app error:%s"%str(e))
def ab_test_other():
cycle_upper = {
1: 10,
2: 30,
3: 60,
4: 76
}
c1b = (Application.select()
.where(fn.DATE(Application.C1B_entry) == date.today(),
Application.type == ApplicationType.CASH_LOAN.value)
.order_by(-Application.overdue_days)
)
c1b_id = [a.id for a in c1b]
dis_app_update = (DispatchApp.update(status=DisAppStatus.ABNORMAL.value)
.where(DispatchApp.application.in_(c1b_id)))
dis_app_update.execute()
c3 = (Application.select()
.where(fn.DATE(Application.C3_entry) == date.today(),
Application.type == ApplicationType.CASH_LOAN.value))
all_id = [b.id for b in c3]
try:
        # assign part of the C3 cases to outsourcing
partners = (Partner.select()
.where(Partner.status == PartnerStatus.NORMAL.value,
Partner.cycle == Cycle.C3.value))
start_index, end_index, out_apps = 0, 0, {}
for p in partners:
end_index += int(len(all_id) * p.app_percentage)
out_apps[p.id] = all_id[start_index:end_index]
start_index = end_index
c3_id = all_id[end_index:]
allot_c3_case(out_apps)
except:
c3_id = all_id
config = SystemConfig.prefetch(SCI.AB_TEST_C1B, SCI.AB_TEST_C3)
c1b_bomber = config.get(SCI.AB_TEST_C1B, SCI.AB_TEST_C1B.default_value)
c3_bomber = config.get(SCI.AB_TEST_C3, SCI.AB_TEST_C3.default_value)
    # filter out collectors who handle instalment cases
c3_bomber = get_cash_bomber(c3_bomber, Cycle.C3.value)
data = [{'ids': c1b_id, 'bomber': c1b_bomber, 'index': 0, 'cycle': 2},
{'ids': c3_id, 'bomber': c3_bomber, 'index': 1, 'cycle': 4}]
for d in data:
applications = d.get('ids')
length = len(applications)
end = int(length * d.get('index'))
gen = CycleIter(d.get('bomber'))
existing_list = []
if not applications:
continue
bills = BillService().bill_list(
application_ids=applications)
bill_dict = {bill['application_id']: bill for bill in bills}
for a in applications[:end]:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == a).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = (cycle_upper.get(application.cycle) -
application.overdue_days)
DispatchAppHistory.create(
application=a,
bomber_id=bomber,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(application.amount -
bill_dict[a]['principal_paid']),
entry_late_fee_pending=(bill_dict[a]['late_fee'] -
bill_dict[a]['late_fee_paid']),
expected_out_time=(date.today() +
timedelta(days=day_next_cycle))
)
        # dispatch to outsourcing teams per the partner-table configuration
if d.get('cycle') == Cycle.C1B.value:
c1b_wb_partner = (Partner.select()
.where(Partner.cycle == Cycle.C1B.value,
Partner.status ==
PartnerStatus.NORMAL.value))
            # the c1b outsourcing teams
c1b_wb_p_dict = { str(p.id):p.app_percentage for p in c1b_wb_partner}
c1b_wb_pids = list(map(int, c1b_wb_p_dict.keys()))
c1b_wb_bombers = (Bomber.select()
.where(Bomber.is_del == 0,
Bomber.partner_id << c1b_wb_pids,
Bomber.password.is_null(False)))
            # members of each outsourcing team and each team's share of cases
c1b_wb_pba = {}
apps_num = len(applications)
for cb in c1b_wb_bombers:
cb_key = str(cb.partner_id)
if cb_key in c1b_wb_pba:
c1b_wb_pba[cb_key]["bids"].append(cb.id)
else:
                    # use the team's percentage to size its slice of cases
start = end
percentage = c1b_wb_p_dict.get(cb_key, 0)
end = start + ceil(apps_num * percentage)
c1b_wb_pba[cb_key] = {
"bids": [cb.id],
"pid": cb.partner_id,
"apps": applications[start:end]
}
            # remaining new cash-loan c1b cases stay in-house
inner_c1b_apps = applications[end:]
dispatch_c1b_inner_apps(aids=inner_c1b_apps,
bills=bill_dict,
period=cycle_upper.get(Cycle.C1B.value))
for pid,c1b_wb in c1b_wb_pba.items():
c1b_wb_apps = c1b_wb["apps"]
c1b_wb_bids = c1b_wb["bids"]
average_nums = get_average_number(len(c1b_wb_apps),
len(c1b_wb_bids))
bid_end = 0
for b_index,bid in enumerate(c1b_wb_bids):
bid_start = bid_end
bid_end = bid_start + average_nums[b_index]
bid_apps = c1b_wb_apps[bid_start:bid_end]
logging.info("c1b_分件:bid:%s,bid_apps:%s"%(bid, bid_apps))
with db.atomic():
app_sql = (Application.update(latest_bomber=bid,
status=ApplicationStatus.AB_TEST.value,
ptp_bomber=None)
.where(Application.id << bid_apps))
app_sql.execute()
params = {
"apps":bid_apps,
"partner_id": int(pid),
"bill_dict": bill_dict,
"period": cycle_upper.get(Cycle.C1B.value),
"bomber_id":bid
}
c1b_dispatch_in_record(**params)
try:
for aid in bid_apps:
dispatch_inserts = {
'application': aid,
'bomber': bid,
'partner': int(pid),
'status': DisAppStatus.NORMAL.value,
}
q = (DispatchApp.update(**dispatch_inserts)
.where(DispatchApp.application == aid)
.execute())
if not q:
DispatchApp.create(**dispatch_inserts)
except Exception as e:
logging.error("dispatchApp插入失败:%s"%str(e))
def allot_c3_case(out_data):
dispatch_inserts = []
for key, value in out_data.items():
if not value:
continue
bombers = (Bomber
.filter(Bomber.partner == key,
Bomber.status == BomberStatus.OUTER.value,
Bomber.is_del == 0))
bomber_ids = [b.id for b in bombers]
bomber = CycleIter(bomber_ids)
bills = BillService().bill_list(application_ids=value)
bill_dict = {bill['application_id']: bill for bill in bills}
for v in value:
bomber_id = bomber.__next__()
q = (DispatchApp.delete()
.where(DispatchApp.application == v)
.execute())
dispatch_inserts.append({
'application': v,
'bomber': bomber_id,
'partner': key,
})
            # snapshot the case for data analysis
application = (Application.filter(Application.id == v)).first()
application.latest_bomber = bomber_id
application.ptp_bomber = None
application.status = ApplicationStatus.AB_TEST.value
application.save()
            # a c3 case reaches the next cycle at 90 overdue days
day_next_cycle = (90 - application.overdue_days)
DispatchAppHistory.create(
application=v,
partner_id=key,
bomber_id=bomber_id,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(
application.amount -
Decimal(bill_dict[v].get('principal_paid'))),
entry_late_fee_pending=(
Decimal(bill_dict[v].get('late_fee')) -
Decimal(bill_dict[v].get('late_fee_paid'))),
expected_out_time=(
date.today() + timedelta(days=day_next_cycle))
)
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
# collectors who only handle single-period (cash-loan) cases
def get_cash_bomber(bids, cycle):
cash_bombers = (Bomber.select()
.where(Bomber.id << bids,
Bomber.is_del == 0,
Bomber.instalment != cycle))
cash_bids = [b.id for b in cash_bombers]
return cash_bids
# dispatch single-period c1b cases to in-house staff
def dispatch_c1b_inner_apps(aids, bills, period=30):
    # staff who should receive cases
bombers = (Bomber.select()
.where(Bomber.role_id == 5,
Bomber.is_del == 0,
Bomber.instalment == 0))
bids = [b.id for b in bombers]
if not aids or not bids:
return
avg_num = get_average_number(len(aids),len(bids))
end = 0
with db.atomic():
for index,b in enumerate(bids):
start = end
end = start + avg_num[index]
b_aids = aids[start:end]
app_sql = (Application.update(latest_bomber=b,
status=ApplicationStatus.AB_TEST.value,
ptp_bomber=None)
.where(Application.id << b_aids))
app_sql.execute()
params = {
"apps": b_aids,
"bill_dict": bills,
"period": period,
"bomber_id": b
}
c1b_dispatch_in_record(**params)
# dispatch instalment cases to staff
def dispatch_instalment_app():
cycle_list = [Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value,Cycle.M3.value]
    # daily: for each cycle, fetch the cases not yet dispatched
for cycle in cycle_list:
apps = (Application.select()
.where(Application.cycle == cycle,
Application.latest_bomber.is_null(True),
Application.status != ApplicationStatus.REPAID.value,
(Application.type ==
ApplicationType.CASH_LOAN_STAGING.value)))
aids = [a.id for a in apps]
if not aids:
continue
        # the designated bombers
bombers = (Bomber.select()
.where(Bomber.is_del == 0,
Bomber.instalment == cycle))
bids = [b.id for b in bombers]
if not bids:
continue
        average_nums = get_average_number(len(aids), len(bids))
end = 0
for i,bid in enumerate(bids):
start = end
end = start + average_nums[i]
bid_apps = aids[start:end]
with db.atomic():
                # update status
q = (Application.update(ptp_bomber = None,
                        latest_bomber = bid,  # newest collector id
                        last_bomber = Application.latest_bomber,  # previous collector
                        status = ApplicationStatus.AB_TEST.value)  # manually maintained case
.where(Application.id << bid_apps)
.execute())
record_param = {"cycle": cycle,
"application_ids": bid_apps,
"dest_bomber_id": bid}
out_and_in_record_instalment(**record_param)
# instalment case entry and exit records
def out_and_in_record_instalment(**kwargs):
if not kwargs.get("application_ids"):
return
    # exit records first
out_q = (DispatchAppHistory.update(out_at = fn.NOW())
.where(DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True))
.execute())
    # then entry records
cycle_period = {
1: '10',
2: '30',
3: '60',
4: '90'
}
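    # cycle_period maps a cycle to the overdue-day count at which the case
    # should exit; the raw SQL below computes
    # expected_out_time = CURDATE() + (period - overdue_days).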
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(kwargs['dest_bomber_id'])).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(kwargs['dest_partner_id'])).alias('partner_id'),
(SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'
' t1.overdue_days) DAY)' % period))
.alias('expected_out_time'))
.where(Application.status != ApplicationStatus.REPAID.value,
Application.id << kwargs['application_ids']))
application_list = list(subquery)
for idx in range(0, len(application_list), 50):
applications = application_list[idx:idx + 50]
app_ids = [i.application_id for i in applications]
        # fetch all the overdue bills
overdue_bills = (OverdueBill.select()
.where(OverdueBill.collection_id << app_ids))
sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
insert_args = lambad_instalment_result(bill_list, applications)
if not insert_args:
continue
DispatchAppHistory.insert_many(insert_args).execute()
# format the instalment entry-record rows
def lambad_instalment_result(bill_list,applications):
bill_dict = {}
insert_args = []
    # aggregate entry amounts per bill
for sub_bill in bill_list:
bill_id = sub_bill["bill_id"]
principal_pending = sub_bill["amount"] - sub_bill['principal_paid']
late_fee_pending = sub_bill["late_fee"] - sub_bill["late_fee_paid"]
if bill_id in bill_dict:
bill_dict[bill_id]["entry_principal_pending"] += principal_pending
bill_dict[bill_id]["entry_late_fee_pending"] += late_fee_pending
else:
bill_dict[bill_id] = {
"entry_principal_pending": principal_pending,
"entry_late_fee_pending": late_fee_pending
}
for app in applications:
bill_entry = bill_dict.get(app.bill_id, {})
entry_principal_pending = bill_entry.get("entry_principal_pending", 0)
entry_late_fee_pending = bill_entry.get("entry_late_fee_pending", 0)
insert_dict = {
'created_at': app.created_at,
'updated_at': app.updated_at,
'application': app.application_id,
'bomber_id': app.bomber_id,
'entry_at': app.entry_at,
'entry_overdue_days': app.entry_overdue_days,
'partner_id': app.partner_id,
'expected_out_time': app.expected_out_time,
'entry_principal_pending': entry_principal_pending,
'entry_late_fee_pending': entry_late_fee_pending
}
insert_args.append(insert_dict)
return insert_args
def c1b_dispatch_in_record(**kwargs):
app_ids = kwargs.get("apps")
partner_id = kwargs.get("partner_id","null")
bill_dict = kwargs.get("bill_dict")
period = kwargs.get("period")
bomber_id = kwargs.get('bomber_id')
if not all([app_ids, partner_id, bill_dict, period]):
return False
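    # normalize keys to str: lambda_result looks bills up by str(application_id)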
bill_dict = { str(k):v for k,v in bill_dict.items()}
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(partner_id)).alias('partner_id'),
(SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'
' t1.overdue_days) DAY)' % period))
.alias('expected_out_time'))
.where(Application.id << app_ids))
application_list = list(subquery)
for idx in range(0,len(application_list),1000):
applications = application_list[idx:idx+1000]
insert_args = list(map(partial(lambda_result,
dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
# build the contact phone-number queue
@action(MessageAction.BOMBER_AUTO_CALL_CONTACT)
def bomber_auto_call_contact(payload, msg_id):
application_list = payload['application_list']
applications = []
for app_id in application_list:
applications.append(Application.filter(Application.id == app_id)
.first())
    # build each application's contact queue
with db.atomic():
for application in applications:
cycle = application.cycle
            # query conditions
contacts = (
Contact
.select()
.where(Contact.user_id == application.user_id,
Contact.latest_status.not_in(ContactStatus.no_use()))
.order_by(-Contact.useful,
Contact.relationship,
-Contact.total_duration,
-Contact.total_count)
)
level1 = []
level2 = []
level3 = []
level = []
for c in contacts:
if c.relationship == Relationship.APPLICANT.value:
level.append(c)
elif c.relationship == Relationship.FAMILY.value:
level1.append(c)
elif c.relationship == Relationship.COMPANY.value:
level2.append(c)
elif c.relationship == Relationship.SUGGESTED.value:
level3.append(c)
contacts = level + level2 + level1 + level3
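            # call priority: applicant first, then company, family, suggested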
numbers = []
fc_count = 0
            # Pre-check whether the numbers need validation (can they be reached)
app_calls = []
need_verify = False
for eac_contact in contacts:
if (eac_contact.relationship == Relationship.FAMILY.value and
eac_contact.useful == ContactsUseful.NONE.value):
need_verify = True
break
if need_verify:
logging.info('Found contact need update. app id {}'
.format(str(application.id)))
app_calls = AuditService().phone_invalid(cat=Relationship(1).name,
application_id=application.external_id)
call_history = True
c1b_family_dict = defaultdict(list)
for c in contacts:
if c.relationship == Relationship.COMPANY.value:
if cycle == Cycle.C1A.value:
call_history = check_call_history(application)
break
if cycle == Cycle.C1B.value:
                        # for now, c1b company calls use only the number the applicant provided
if c.source != CompanyContactType.BASIC_INFO_JOB_TEL.value:
continue
if c.relationship == Relationship.FAMILY.value:
if cycle == Cycle.C1A.value:
call_history = check_call_history(application)
break
# Update contact useful
if c.useful == ContactsUseful.NONE.value:
c.useful = check_valid_phone(app_calls, c)
c.save()
if c.useful == ContactsUseful.INVALID.value:
logging.info('Found invalid contact. {}'
.format(str(c.id)))
continue
                    # family contacts must be grouped and ordered by source
if cycle == Cycle.C1B.value:
c1b_family_dict[c.source].append(c.number)
continue
if c.relationship == Relationship.SUGGESTED.value:
if cycle not in (Cycle.C2.value, Cycle.C3.value):
break
if cycle == Cycle.C2.value and fc_count > 10:
break
if cycle == Cycle.C3.value and fc_count > 20:
break
fc_count += 1
numbers.append(c.number)
# if cycle1 applicant is in no_use add ec
if len(numbers) == 0 or not call_history:
src_contact = (
Contact.select()
.where(Contact.user_id == application.user_id,
                       Contact.source.in_(FamilyContactType.c1a_order())))
                # if no C1A call connected within five days, dial in the new order; expanded from 2 source types to 4
c1a_family_dict = defaultdict(list)
for e in src_contact:
c1a_family_dict[e.source].append(e.number)
for call_type in FamilyContactType.c1a_order():
numbers.extend(c1a_family_dict[call_type])
if cycle == Cycle.C1B.value:
for call_type in FamilyContactType.c1b_order():
numbers.extend(c1b_family_dict[call_type])
numbers = list(set(numbers))
update_query = (
AutoCallList
.update(numbers=','.join(numbers))
.where(AutoCallList.application == application.id)
)
update_query.execute()
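# check_valid_phone marks a contact INVALID when its number appears in the
# audit service's invalid-phone list (matched against tel_no / mobile_no).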
def check_valid_phone(phone_list, contact):
useful = ContactsUseful.AVAILABLE.value
for each_phone in phone_list:
if contact.number == each_phone.get('tel_no') or \
contact.number == each_phone.get('mobile_no'):
useful = ContactsUseful.INVALID.value
break
return useful
# for c1a cases with no connected call within 5 days, open EC contacts
def check_call_history(application):
app_create_at = application.created_at + timedelta(days=4)
if datetime.today().date() > app_create_at.date():
call_actions = (CallActions.select()
.where(CallActions.type == 0,
CallActions.application == application.id,
CallActions.created_at >
(datetime.now() - timedelta(days=5))))
for call in call_actions:
if call.phone_status == PhoneStatus.CONNECTED.value:
return True
return False
return True
# when now minus updated_at exceeds SCAVENGER_TIME, the scavenger resets the status
@action(MessageAction.BOMBER_SCAVENGER)
def scavenger(payload, msg_id):
scavenger_time = -60
scavenger = (SystemConfig.select()
.where(SystemConfig.key == 'SCAVENGER_TIME')
.first())
if scavenger and scavenger.value.isdigit():
scavenger_time = -int(scavenger.value)
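    # scavenger_time is negative, so "now + timedelta(minutes=scavenger_time)"
    # below is a timestamp scavenger_time minutes in the past.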
update_auto_call_list = (
AutoCallList
.update(status=AutoListStatus.PENDING.value,
description='scavenger')
.where(
AutoCallList.status == AutoListStatus.PROCESSING.value,
AutoCallList.updated_at <
datetime.now() + timedelta(minutes=scavenger_time),
)
)
count = update_auto_call_list.execute()
logging.info('scavenger processed %s application', count)
    # reset auto-call entries whose status is MAILBOX
mail_box_scavenger_time = -30
mail_box_scavenger = (SystemConfig.select()
.where(SystemConfig.key == 'MAIL_BOX_SCAVENGER_TIME')
.first())
if mail_box_scavenger and mail_box_scavenger.value.isdigit():
mail_box_scavenger_time = -int(mail_box_scavenger.value)
update_mail_box_call_list = (
AutoCallList.update(status=AutoListStatus.PENDING.value)
.where(AutoCallList.status == AutoListStatus.MAILBOX.value,
AutoCallList.updated_at <
datetime.now() + timedelta(minutes=mail_box_scavenger_time))
)
mail_box_count = update_mail_box_call_list.execute()
logging.info("scavenger update mail box %s", mail_box_count)
    # if no IVR callback arrives within 30 minutes, reset the IVR status
update_auto_ivr = (
AutoIVR
.update(status=AutoIVRStatus.AVAILABLE.value)
.where(AutoIVR.status == AutoIVRStatus.PROCESSING.value,
AutoIVR.updated_at < datetime.now() + timedelta(minutes=-30)
)
)
ivr_result = update_auto_ivr.execute()
logging.info("scavenger update %s ivr"%ivr_result)
@action(MessageAction.BOMBER_CLEAR_OVERDUE_PTP)
def bomber_clear_overdue_ptp(payload, msg_id):
    # C1B, C2 and C3 have no predictive outbound calling, so once their PTP is
    # cleared the cases must go back to outsourcing or AB test: past the
    # promised date they are switched to manual maintenance.
update_overdue_ptp_ab = (
Application.update(
status=ApplicationStatus.AB_TEST.value,
).where(
fn.DATE(Application.promised_date) < datetime.today().date(),
Application.status == ApplicationStatus.PROCESSING.value,
Application.cycle << [Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value]
)
)
count1 = update_overdue_ptp_ab.execute()
logging.info('bomber overdue ptp for C1B C2 and C3 cleared: %s', count1)
now_and_yesterday = ((datetime.today() + timedelta(days=1)).date(),
datetime.today().date())
overdue_1a1b_cs_ptp = (CallActions
.select()
.where(fn.DATE(CallActions.promised_date)
.in_(now_and_yesterday),
CallActions.bomber_id == 72))
update_overdue_1a1b_cs_ptp = (
Application
.update(status=ApplicationStatus.UNCLAIMED.value)
.where(Application.status == ApplicationStatus.PROCESSING.value,
Application.cycle == Cycle.C1A.value,
Application.id.in_(overdue_1a1b_cs_ptp)))
logging.debug("bomber c1a c1b cs ptp: %s", update_overdue_1a1b_cs_ptp)
count2 = update_overdue_1a1b_cs_ptp.execute()
logging.info('bomber c1a c1b cs overdue ptp cleared: %s', count2)
update_overdue_ptp = (
Application
.update(
status=ApplicationStatus.UNCLAIMED.value,
).where(
fn.DATE(Application.promised_date) < datetime.today().date(),
Application.status == ApplicationStatus.PROCESSING.value,
Application.cycle == Cycle.C1A.value,
)
)
count = update_overdue_ptp.execute()
logging.info('bomber overdue ptp cleared: %s', count)
@action(MessageAction.REPORT_BOMBER_COLLECTION)
def report_bomber_collection(payload, msg_id):
start_date = (ReportCollection
.select(fn.MAX(ReportCollection.apply_date))
.scalar())
now = datetime.now()
if start_date and str(start_date) == str(now)[:10]:
return
end_date = str(now + timedelta(days=1))[:10]
start_date = str(now)[:10]
dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))
all_overdue_loan_sql1 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
GROUP BY 1;
"""
s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()
d1 = OperatedDict(s_data1)
all_overdue_loan_sql2 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE DATE(ba.follow_up_date) > CURDATE()
AND ba.called_counts = 0
GROUP BY 1;
"""
s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()
d2 = OperatedDict(s_data2)
overdue_loans_entered_into_predict_call_system_sql = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE ba.called_counts >= 1
GROUP BY 1;
"""
s_data3 = readonly_db.execute_sql(
overdue_loans_entered_into_predict_call_system_sql).fetchall()
d3 = OperatedDict(s_data3)
loans_completed_sql = """
SELECT ba.cycle, COUNT(DISTINCT ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()
d4 = OperatedDict(s_data4)
connected_calls_automatic_sql = """
SELECT ba.cycle, COUNT(ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()
d5 = OperatedDict(s_data5)
connected_calls_manual_sql = """
SELECT bb.cycle, COUNT(bb.id)
FROM bomber.bombing_history bb
WHERE DATE(bb.created_at) = curdate()
AND (bb.bomber_id < 150 OR bb.bomber_id > 200)
GROUP BY bb.cycle;
"""
s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()
d6 = OperatedDict(s_data6)
logging.info('Directly get data from database successfully.')
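    # c1..c11 feed the report columns below: c1 = overdue loans net of
    # untouched follow-ups, c3 = share that entered predictive dialing,
    # c5 = completion rate, c7 = connected auto calls per completed loan,
    # c10 = connected calls (auto + manual) per agent.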
c1 = d1 - d2
c2 = d3
c3 = c2 / c1
c4 = d4
c5 = c4 / c2
c6 = d5
c7 = c6 / c4
c8 = d6
c9 = OperatedDict(get_agent())
c10 = (c6 + c8) / c9
try:
c11 = average_call_duration_team(start_date, end_date)
except AttributeError:
c11 = {}
lst = []
for i in range(1, 5):
lst.append({
'apply_date': start_date,
'cycle': dct[i],
'all_overdue_loan': c1.get(i, 0),
'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),
'of_overdue_loans_entered_into_predict_call_system':
round(c3.get(i, 0) * 100, 1),
'loans_completed': c4.get(i, 0),
'of_completed_loans_in_predict_call_system':
round(c5.get(i, 0) * 100, 1),
'connected_calls_automatic': c6.get(i, 0),
'connected_calls_automatic_completed_loans':
round(c7.get(i, 0), 1),
'connected_calls_manual': c8.get(i, 0),
'agent': c9.get(i, 0),
'average_calls_agent': round(c10.get(i, 0), 1),
'average_call_duration_team': round(c11.get(i, 0), 1)
})
ReportCollection.insert_many(lst).execute()
logging.info('report_bomber_collection:Done!')
@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)
def bomber_auto_call_list_record(payload, msg_id):
"""记录一年的auto_call_list,删除前一天的数据,增加今天的数据"""
now = datetime.now()
if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):
date_sql = """
SELECT DATE(created_at) FROM auto_call_list_record
GROUP BY DATE(created_at) limit 1
"""
del_date = db.execute_sql(date_sql).fetchone()[0]
del_sql = """
DELETE FROM auto_call_list_record WHERE date(created_at) = %s
"""
db.execute_sql(del_sql, [del_date])
sql = """
INSERT INTO auto_call_list_record
SELECT * FROM auto_call_list
"""
db.execute_sql(sql)
logging.info("bomber_auto_call_list_record done")
@action(MessageAction.BOMBER_MANUAL_CALL_LIST)
def bomber_manual_call_list(payload, msg_id):
"""
    Manual case dispatch, driven by ManualCallList batches.
:param payload:
:param msg_id:
:return:
"""
batch_id = payload.get('batch_id')
if batch_id is None:
logging.warning('Invalid batch id')
return
query = (ManualCallList
.select()
.where(ManualCallList.batch_id == batch_id,
ManualCallList.status << ManualCallListStatus.available()))
if not query.exists():
logging.warning('Empty application id list')
return
for q in query:
application_ids = json.loads(q.application_ids or '[]')
# where
cycle = 0
where_list = [(Application.id << application_ids),
Application.latest_bomber_id == q.src_bomber_id]
src_params = json.loads(q.src_params or '{}')
if "cycle" in src_params:
where_list.append(Application.cycle == src_params['cycle'])
cycle = src_params['cycle']
if "status" in src_params:
where_list.append(Application.status == src_params['status'])
# update
update_dict = {'latest_bomber': q.dest_bomber_id}
dest_params = json.loads(q.dest_params or '{}')
if "cycle" in dest_params:
update_dict['cycle'] = dest_params['cycle']
cycle = dest_params['cycle']
if "status" in dest_params:
update_dict['status'] = dest_params['status']
with db.atomic():
try:
# update dispatch_app
if q.update_dispatch_app:
if q.dest_partner_id is None:
raise ValueError('unallowed operation')
(DispatchApp
.delete()
.where(DispatchApp.application_id.in_(application_ids))
.execute())
(DispatchApp
.insert_many([{
'application': i,
'partner': q.dest_partner_id,
'bomber': q.dest_bomber_id,
'status': DisAppStatus.NORMAL.value}
for i in application_ids])
.execute())
application_success_row = (
Application
.update(**update_dict)
.where(*where_list)
.execute()
)
if application_success_row == 0:
raise ValueError('Invalid parameter')
(ManualCallList
.update(
status=ManualCallListStatus.SUCCESS.value,
length=application_success_row)
.where(ManualCallList.id == q.id)
.execute())
out_and_in_record(
src_bomber_id=q.src_bomber_id,
application_ids=application_ids,
dest_partner_id=q.dest_partner_id,
dest_bomber_id=q.dest_bomber_id,
cycle=cycle
)
except Exception:
db.rollback()
(ManualCallList
.update(
status=ManualCallListStatus.FAILED.value,
length=0)
.where(ManualCallList.id == q.id)
.execute())
logging.error("PRINT BOMBER_MANUAL_CALL_LIST ERROR:\n%s",
traceback.format_exc())
continue
def lambda_result(item, dct):
a = str(item.application_id)
entry_principal_pending = (Decimal(item.amount or 0) -
dct[a]['principal_paid'])
entry_late_fee_pending = dct[a]['late_fee'] - dct[a]['late_fee_paid']
return {
'created_at': item.created_at,
'updated_at': item.updated_at,
'application': a,
'bomber_id': item.bomber_id,
'entry_at': item.entry_at,
'entry_overdue_days': item.entry_overdue_days,
'partner_id': item.partner_id,
'expected_out_time': item.expected_out_time,
'entry_principal_pending': entry_principal_pending,
'entry_late_fee_pending': entry_late_fee_pending
}
def out_and_in_record(**kwargs):
"""
    case exit and entry records in the collection system
"""
new_out_record(**kwargs)
new_in_record(**kwargs)
def new_out_record(**kwargs):
if not kwargs['application_ids']:
return
(DispatchAppHistory
.update(out_at=fn.NOW())
.where(DispatchAppHistory.bomber_id == kwargs['src_bomber_id'],
DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True))
.execute())
    # for month-end dispatch, ptp_bomber must not be cleared
if kwargs.get("month_dispatch"):
return
    # on exit, clear ptp_bomber for cases under a PTP
try:
(Application.update(ptp_bomber=None)
.where(Application.id << kwargs["application_ids"])
.execute())
except Exception as e:
logging.error("new_out_record error:aids:%s,error:%s" %
(kwargs["application_ids"],str(e)))
def new_in_record(**kwargs):
cycle_period = {
1: '10',
2: '30',
3: '60',
4: '90'
}
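    # same convention as out_and_in_record_instalment: the overdue-day count
    # at which the case should leave the current cycle.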
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(kwargs['dest_bomber_id'])).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(kwargs['dest_partner_id'])).alias('partner_id'),
(SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'
' t1.overdue_days) DAY)' % period))
.alias('expected_out_time'))
.where(Application.status != ApplicationStatus.REPAID.value,
Application.id << kwargs['application_ids']))
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
app_ids = [i.application_id for i in applications]
bill_list = BillService().bill_list(application_ids=app_ids)
bill_dict = {str(bill['application_id']): bill for bill in bill_list}
insert_args = list(map(partial(lambda_result,
dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
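# end_old_application() walks the OldLoanStatus state machine:
# WAITING -> PAID on repayment, PROCESSING -> PAID on repayment (returning the
# application id so it can be taken out of dispatch), and PROCESSING ->
# FINISHED once both end_date and any promised_date have passed.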
def end_old_application(old_app, paid=False):
if paid:
if old_app.status == OldLoanStatus.WAITING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return old_app.application_id
end_date = old_app.end_date
now = datetime.now()
if now >= max(end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
old_app.save()
return old_app.application_id
@action(MessageAction.UPDATE_OLD_LOAN_APPLICATION)
def update_old_loan_application(payload, msg_id):
items = (Application
.select(Application, OldLoanApplication)
.join(OldLoanApplication,
JOIN_INNER,
on=(Application.id ==
OldLoanApplication.application_id).alias('old_app'))
.where(OldLoanApplication.status
.in_(OldLoanStatus.available())))
out_list = []
for application in items:
if application.overdue_days > 90:
if application.old_app.status == OldLoanStatus.WAITING.value:
start_old_application(application.old_app)
else:
out_list.append(application.old_app)
success_list = [end_old_application(item) for item in out_list]
app_ids = list(filter(None, success_list))
if app_ids:
bomber_id = SpecialBomber.OLD_APP_BOMBER.value
out_record(src_bomber_id=bomber_id, application_ids=app_ids)
def in_record(**kwargs):
"""
:param kwargs: dist_partner_id, dist_bomber_id,
expected_out_time, application_ids
:return:
"""
    # TODO: unify the entry-record logic
kwargs['dist_partner_id'] = kwargs.get('dist_partner_id') or 'null'
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(kwargs['dist_bomber_id'])).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(kwargs['dist_partner_id'])).alias('partner_id'),
R('"{}"'.format(kwargs['expected_out_time']))
.alias('expected_out_time'))
.where(Application.status != ApplicationStatus.REPAID.value,
Application.id << kwargs['application_ids']))
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
app_ids = [i.application_id for i in applications]
bill_list = BillService().bill_list(application_ids=app_ids)
bill_dict = {str(bill['application_id']): bill for bill in bill_list}
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
def out_record(**kwargs):
"""
:param kwargs: src_bomber_id, application_ids
:return:
"""
    # TODO: unify the exit-record logic
if not kwargs.get('application_ids'):
return
(DispatchAppHistory
.update(out_at=fn.NOW())
.where(DispatchAppHistory.bomber_id == kwargs['src_bomber_id'],
DispatchAppHistory.application << kwargs['application_ids'])
.execute())
    # on exit, clear ptp_bomber for cases under a PTP
try:
(Application.update(ptp_bomber=None)
.where(Application.id << kwargs["application_ids"])
.execute())
except Exception as e:
logging.error("out_record error:aids:%s,error:%s" %
(kwargs["application_ids"], str(e)))
def start_old_application(old_app, cancel=False):
application_id = old_app.application_id
if cancel and (old_app.status == OldLoanStatus.PAID.value):
now = datetime.now()
if old_app.start_date is None:
            # has not yet entered the 500 pool
old_app.status = OldLoanStatus.WAITING.value
elif now >= max(old_app.end_date, old_app.promised_date or now):
            # on cancel, the user has already left the 500 pool
old_app.status = OldLoanStatus.FINISHED.value
(DispatchAppHistory
.update(out_at=max(old_app.end_date,
old_app.promised_date or now))
.where(DispatchAppHistory.bomber_id == old_app.bomber_id,
DispatchAppHistory.application == application_id)
.execute())
else:
            # still in the 500 pool
old_app.status = OldLoanStatus.PROCESSING.value
(DispatchAppHistory
.update(out_at=None)
.where(DispatchAppHistory.bomber_id == old_app.bomber_id,
DispatchAppHistory.application == application_id)
.execute())
old_app.save()
return
application = (
Application
.get_or_none(Application.id == application_id,
Application.status != ApplicationStatus.REPAID.value,
Application.overdue_days > 90,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) <
datetime.today().date())))
if not application:
logging.error("Can not set old application %s to start collecting",
application_id)
return
if old_app.status in OldLoanStatus.no_available():
logging.info("%s has finished or paid", old_app.application_id)
return
config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)
sp = config.get(SCI.OLD_APP_PERIOD,
SCI.OLD_APP_PERIOD.default_value)
old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value
old_app.status = OldLoanStatus.PROCESSING.value
old_app.bomber_id = old_app_bomber
old_app.start_date = datetime.now()
    # only set end_date if it has not been set before
if not old_app.end_date:
old_app.end_date = datetime.now() + timedelta(days=sp)
old_app.save()
in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,
application_ids=[old_app.application_id],
expected_out_time=str(old_app.end_date))
@action(MessageAction.OLD_LOAN_APPLICATION)
def old_loan_application(payload, msg_id):
application_id = payload.get('application_id')
numbers = payload.get('numbers', [])
if not (application_id and numbers):
logging.error("empty application id: %s, or invalid numbers: %s",
application_id, numbers)
application = Application.get_or_none(Application.id == application_id)
if (application and
application.status == ApplicationStatus.REPAID.value):
logging.error("application %s has paid", application_id)
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
raise RuntimeError('Get golden eye user failed. {}'
.format(str(application_id)))
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
user_name = gold_app['id_name']
    # use the bill to get the bill type; instalment bills are not linked to OldLoanApplication
try:
bill = BillService().bill_dict(application_id=application_id)
except Exception:
logging.error(
'application %s get bill info failed,old_loan_application',
application_id)
return
source_contacts = (Contact
.filter(Contact.user_id == user_id,
Contact.relationship ==
Relationship.APPLICANT.value,
Contact.source ==
ApplicantSource.NEW_APPLICANT.value))
source_contact_set = {i.number for i in source_contacts}
    # instalment bills skip the operations below
if bill["category"] != ApplicationType.CASH_LOAN_STAGING.value:
        # fetch existing new-applicant numbers
old_app = OldLoanApplication.get_or_none(
OldLoanApplication.application_id == application_id,
OldLoanApplication.status.in_(OldLoanStatus.available())
)
if not old_app:
old_app = OldLoanApplication.create(application_id=application_id,
user_id=user_id,
numbers=','.join(numbers))
else:
_numbers = old_app.numbers.split(',')
            # de-duplicate and drop empty numbers
old_app.numbers = ','.join(set([nu for nu in (_numbers + numbers)
if nu]))
            # cases already in collection: extend end_date by 7 days
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.end_date = old_app.end_date + timedelta(days=7)
old_app.save()
new_contact = set(numbers) - source_contact_set
insert_args = [{'user_id': user_id,
'name': user_name,
'number': i,
'relationship': Relationship.APPLICANT.value,
'source': ApplicantSource.NEW_APPLICANT.value,
'real_relationship': Relationship.APPLICANT.value
} for i in new_contact]
if insert_args:
Contact.insert_many(insert_args).execute()
if bill["category"] == ApplicationType.CASH_LOAN_STAGING.value:
return
start_old_application(old_app)
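# SQL helpers below run raw queries against the read-only replica. Amounts are
# divided by 1,000,000 because bill_java apparently stores money in millionths
# of the currency unit (an inference from how the results are used here).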
def run_one_sql(sql):
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
result = cursor.fetchone()[0] / 1000000
except Exception as e:
logging.info('run sql error: %s' % str(sql))
result = Decimal(0)
return result
def run_member_sql(sql):
result = [0, 0]
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
sql_result = cursor.fetchone()
if sql_result:
result = sql_result
except Exception as e:
logging.info('run sql error: %s' % str(sql))
return result
def run_all_sql(sql):
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
result = cursor.fetchall()
except Exception as e:
logging.info('run sql error: %s' % str(sql))
result = []
return result
# recover_rate for dpd1-3 by the pending dimension (deprecated)
def get_before_bomber(date_time):
begin_time = str(date_time - timedelta(days=7))
end_time = str(date_time)
    # pending amount, each Monday, of cases that already existed
old_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at<date_add('%s',interval 1 day)
and overdue_days in (2,3)
""" % (begin_time, begin_time)
old_data = run_one_sql(old_sql)
    # amount of cases newly reaching dpd1 each day
new_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at> '%s'
and created_at<'%s'
and overdue_days=1;
""" % (begin_time, end_time)
new_data = run_one_sql(new_sql)
    # amount entering dpd4 each day
dpd4_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>date_add('%s',interval 1 day)
and created_at< date_add('%s',interval 1 day)
and overdue_days=4;
""" % (begin_time, end_time)
dpd4_data = run_one_sql(dpd4_sql)
    # dpd2/3 still pending as of Monday
dpd2_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at< date_add('%s',interval 1 day)
and overdue_days in (2,3)
""" % (end_time, end_time)
dpd2_data = run_one_sql(dpd2_sql)
all_money = old_data + new_data
repayment = all_money - dpd4_data - dpd2_data
pro = 0
if all_money:
pro = (repayment / all_money) * 100
RepaymentReport.create(
time=begin_time,
cycle=0,
all_money=all_money,
proportion=pro,
repayment=repayment
)
# refresh the recover_rate report weekly (pending dimension)
@action(MessageAction.RECOVER_RATE_WEEK_MONEY)
def recover_rate_week_money(payload, msg_id):
    # number of RECOVER_RATE_WEEK_MONEY runs logged today
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= date.today(),
WorkerLog.action == 'RECOVER_RATE_WEEK_MONEY')
.first())
if worker_log.logs >= 5:
return
logging.info('start cal recover_rate_week_money')
date_time = date.today()
get_every_cycle_report(date_time)
# recover_rate for dpd1-3 by the entry dimension
def get_before_bomber_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
for i in range(2, 5):
money_sql = """
select
sum(bo1.principal_pending+bo1.late_fee_pending+
bo1.interest_pending) as dpd1_pending,
sum(bo2.principal_pending+bo2.late_fee_pending+
bo2.interest_pending) as dpd4_pending
from bill_java.overdue bo1
left join dashboard.application da
on bo1.application_id=da.id
left join bill_java.overdue bo2
on bo1.application_id=bo2.application_id
and bo2.overdue_days=%s and bo2.status = 1
where bo1.overdue_days=1
and bo1.status = 1
and bo1.which_day_overdue>='%s'
and bo1.which_day_overdue<'%s'
and da.is_first_loan = %s
and bo1.stage_num is null
""" % (i, begin_date, end_date, is_first_loan)
try:
cursor = readonly_db.get_cursor()
cursor.execute(money_sql)
money = cursor.fetchone()
all_money = money[0] / 1000000
dpd4_money = money[1] / 1000000
except Exception as e:
logging.info('get all_money error: %s' % str(e))
all_money = 0
dpd4_money = 0
repayment = all_money - dpd4_money
if begin_date == date_time - timedelta(days=1):
RepaymentReportInto.create(
time=begin_date,
cycle=0,
all_money=round(all_money, 3),
proportion='0',
repayment=round(repayment, 3),
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
else:
pro = '0'
if all_money:
pro = (repayment / all_money) * 100
pro = str(round(pro, 2))
RepaymentReportInto.update(
repayment=round(repayment, 3),
proportion=pro
).where(
RepaymentReportInto.time == begin_date,
RepaymentReportInto.cycle == 0,
RepaymentReportInto.is_first_loan == is_first_loan
).execute()
end_date = begin_date
begin_date = begin_date - timedelta(days=1)
# recover_rate for c1a by the entry dimension
def get_c1a_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=19)
repayment_sql = """
select
sum(b.principal_part+b.late_fee_part) as paid_amount,
cdt
from
(select
br.principal_part, br.late_fee_part,
date(cdt) as cdt, br.repay_at, br.application_id
from (
select ba.id, ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
if not repayment:
return
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C1A.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
for d in repayment:
repay = d[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == d[1],
RepaymentReportInto.cycle == Cycle.C1A.value,
RepaymentReportInto.is_first_loan == is_first_loan
).first()
if report:
report.repayment = round(repay, 3)
pro = (repay / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
# recover_rate for c1b by the entry dimension
def get_c1b_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,c1b_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
not_contain_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,c1b_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id and bd.partner_id=5)
) a
inner join bill_java.overdue o on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=22)
repayment_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount,et
from
(select br.principal_part, br.late_fee_part,
date(a.c1b_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c1b_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 2
group by 4, 5) b
group by 2;
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c1b_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c1b_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=5)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 2
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
not_contain_repay = run_all_sql(not_contain_repay_sql)
if not not_contain_repay and not repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C1B.value,
all_money=round(not_contain_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.NOT_CONTAIN.value
)
for repay in not_contain_repay:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C1B.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C1B.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C1B.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
# recover_rate for c2 by the entry dimension
def get_c2_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
not_contain_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=37)
repayment_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
not_contain_repay = run_all_sql(not_contain_repay_sql)
    if not not_contain_repay and not repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C2.value,
all_money=round(not_contain_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.NOT_CONTAIN.value
)
for repay in not_contain_repay:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C2.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C2.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C2.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
# recover_rate for c3 by the entry dimension
def get_c3_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id, ba.c3_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=30)
repayment_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c3_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c3_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = '%s'
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 4
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C3.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
if not repayment:
return
for repay in repayment:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.cycle == Cycle.C3.value,
RepaymentReportInto.is_first_loan == is_first_loan
).first()
if report:
report.repayment = repay_money
pro = 0
if report.all_money and int(report.all_money):
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
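# A worked example of the proportion math above, with hypothetical numbers:
# repay[0] = 2_500_000 scales to repay_money = 2.5; with report.all_money = 10
# the stored proportion is str(round(2.5 / 10 * 100, 2)) == '25.0'. The C1A,
# C1B and C2 variants above follow the same pattern.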
# Refresh the recover_rate report data once a day (entry dimension)
@action(MessageAction.RECOVER_RATE_WEEK_MONEY_INTO)
def recover_rate_week_money_into(payload, msg_id):
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= date.today(),
WorkerLog.action == 'RECOVER_RATE_WEEK_MONEY_INTO')
.first())
if worker_log and worker_log.logs >= 5:
return
date_time = date.today()
get_before_bomber_rate(date_time)
get_c1a_into_rate(date_time)
get_c1b_into_rate(date_time)
get_c2_into_rate(date_time)
get_c3_into_rate(date_time)
    # Flip data that has matured from unripe to ripe
ripe_days = {0: 3, 1: 7, 2: 20, 3: 30, 4: 30}
for i in range(0, 5):
repe_date = date.today() - timedelta(days=ripe_days[i])
(RepaymentReportInto
.update(ripe_ind=RipeInd.RIPE.value)
.where(RepaymentReportInto.time < repe_date,
RepaymentReportInto.cycle == i)
).execute()
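# Sketch of the ripeness rule above, assuming today is 2019-01-31 (hypothetical
# date): cycle 1 ripens after ripe_days[1] == 7 days, so rows with time before
# 2019-01-24 and cycle == 1 flip to RipeInd.RIPE; cycle 0 uses a 3-day window,
# cycles 3 and 4 a 30-day window.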
# ---------- Compute the metrics in summary_bomber carried over from the old summary ----------
# Get the base rows
def get_static_bomber(begin_date):
active_date = begin_date - timedelta(days=8)
bombers = (BomberR
.select(BomberR.id,
BomberR.role.alias('role'),
BomberR.last_active_at.alias('active'))
.where(BomberR.last_active_at > active_date,
                      BomberR.role << [1, 2, 4, 5, 6, 8, 9]))
summary = []
for bomber in bombers:
summary.append({
'time': begin_date,
'bomber_id': bomber.id,
'cycle': bomber.role.cycle,
'work_ind': 0
})
SummaryBomber.insert_many(summary).execute()
# Some metrics must be computed the same evening
@action(MessageAction.SUMMARY_CREATE)
def summary_create(payload, msg_id):
begin_date = date.today()
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= begin_date,
WorkerLog.action == 'SUMMARY_CREATE')
.first())
if worker_log and worker_log.logs >= 5:
return
get_static_bomber(begin_date)
# Get the employees who worked that day
def get_active_bomber(begin_date):
bombers = (BomberR
.select(BomberR.id)
.where(BomberR.last_active_at >= begin_date))
for bomber in bombers:
(SummaryBomber.update(work_ind=1)
.where(SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == bomber.id)
).execute()
# Get each collector's daily call count and number of cases called
@time_logger
def get_call_and_made(end_date, begin_date, real_time_query=False):
call_sql = """
select
bomber_id,
count(case when relationship is not null then application_id end)
as 'call_cnt',
count(distinct case when relationship is not null then
application_id end) as 'call_case',
count(case when phone_status=4 then application_id end) as 'connect',
count(distinct case when phone_status=4 then application_id end)
as 'connect_case'
from (
select bomber_id,application_id,phone_status, cycle, relationship
from bomber.call_actions ba
where created_at>'%s' and created_at<'%s'
and type in (0, 1)
) a
group by 1
""" % (begin_date, end_date)
calls = run_all_sql(call_sql)
if real_time_query:
return calls
for call in calls:
bomber, call_cnt, case_made, connect_cnt, case_connect = call
(SummaryBomber.update(
case_made_cnt=case_made,
call_cnt=call_cnt,
call_connect_cnt=connect_cnt,
case_connect_cnt=case_connect)
.where(
SummaryBomber.bomber_id == bomber,
SummaryBomber.time == begin_date)
).execute()
return calls
# Get each collector's daily count of cases pending collection
@time_logger
def get_claimed_cnt(end_date, begin_date, real_time_query=False):
table_date = begin_date - timedelta(days=30)
claimed_sql = """
SELECT
COUNT( `t1`.`application_id` ) AS cnt,
`t1`.`bomber_id` AS bomber_id
FROM
`dispatch_app_history` AS t1
WHERE
( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null )
AND ( `t1`.`bomber_id` != 1000 )
AND ( `t1`.`partner_id` IS null )
AND ( `t1`.`entry_at` > '%s' )
AND ( `t1`.`entry_at` < '%s' )
GROUP BY
`t1`.`bomber_id`
""" % (begin_date, table_date, end_date)
claimeds = run_all_sql(claimed_sql)
if real_time_query:
return claimeds
for claimed in claimeds:
cnt, bomber_id = claimed
(SummaryBomber.update(claimed_cnt=cnt)
.where(SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == bomber_id)
).execute()
return claimeds
# Get SMS-related data
def get_sms_data(end_date, begin_date):
    all_sms = (ConnectHistoryR
               .select(ConnectHistoryR.operator.alias('bomber_id'),
                       fn.COUNT(ConnectHistoryR.application).alias('sms_send'))
               .where(ConnectHistoryR.created_at > begin_date,
                      ConnectHistoryR.created_at < end_date,
                      ConnectHistoryR.type.in_(ConnectType.sms()))
               .group_by(ConnectHistoryR.operator))
    for sms in all_sms:
        (SummaryBomber.update(sms_cnt=sms.sms_send)
         .where(SummaryBomber.time == begin_date,
                SummaryBomber.bomber_id == sms.bomber_id)
         ).execute()
    return all_sms
# Get PTP-related data
@time_logger
def get_ptp_data(end_date, begin_date, real_query_time=False):
sql = """
SELECT
a.bomber_id,
sum( a.promised_amount ) AS ptp_amount,
count( application_id )
FROM
bomber.auto_call_actions a
LEFT JOIN bomber.bomber c ON a.bomber_id = c.id
WHERE
a.created_at >= '%s'
AND a.created_at < '%s'
AND a.promised_date != ''
GROUP BY 1
UNION
SELECT
a.bomber_id,
ifnull( sum( a.promised_amount ), 0 ) AS ptp_amount,
count( application_id )
FROM
bomber.bombing_history a
LEFT JOIN bomber.bomber c ON a.bomber_id = c.id
WHERE
bomber_id NOT BETWEEN 151
AND 177
AND bomber_id NOT BETWEEN 181
AND 183
AND bomber_id != 72
AND a.created_at >= '%s'
AND a.created_at < '%s'
AND a.promised_date != ''
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date)
ptp_datas = run_all_sql(sql)
if real_query_time:
return ptp_datas
result = {}
for ptp in ptp_datas:
bomber_id, amount, cnt = ptp
if bomber_id in result.keys():
result[bomber_id][0] += amount
result[bomber_id][1] += cnt
continue
result[bomber_id] = [amount, cnt]
for key, value in result.items():
(SummaryBomber
.update(
promised_cnt=value[1],
promised_amount=value[0]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
return ptp_datas
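# Example of the merge above (hypothetical rows): if the auto-call half of the
# UNION yields (101, 500, 2) and the manual half yields (101, 300, 1), then
# result[101] ends up as [800, 3] -- PTP amount and count are summed per
# bomber before the single update is issued.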
# Tally repayment amount and repayment case count
@time_logger
def get_recover_amount(end_date, begin_date, real_time_query=False):
C1_sql = """
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,
count(distinct application_id)
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,4)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
""" % (begin_date, end_date)
C1_results = run_all_sql(C1_sql)
if not real_time_query:
for C1_result in C1_results:
bomber_id, amount, cnt = C1_result
(SummaryBomber.update(
cleared_cnt=cnt,
cleared_amount=amount
).where(
SummaryBomber.bomber_id == bomber_id,
SummaryBomber.time == begin_date
)).execute()
other_sql = """
select current_bomber_id,sum(pay_amount) as pay_amount,
count(distinct application_id)
from (
select application_id,current_bomber_id,pay_amount,repay_at
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (2,3,5,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
""" % (begin_date, end_date)
sql_results = run_all_sql(other_sql)
if not real_time_query:
for sql_result in sql_results:
bomber_id, amount, cnt = sql_result
(SummaryBomber.update(
cleared_cnt=cnt,
cleared_amount=amount
).where(
SummaryBomber.bomber_id == bomber_id,
SummaryBomber.time == begin_date
)).execute()
result = sql_results + C1_results
return result
# New summary report data (computed in stages; this part runs first)
@action(MessageAction.SUMMARY_NEW)
def summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= end_date,
WorkerLog.action == 'SUMMARY_NEW')
.first())
if worker_log and worker_log.logs >= 5:
return
get_active_bomber(begin_date)
get_call_and_made(end_date, begin_date)
get_claimed_cnt(end_date, begin_date)
get_sms_data(end_date, begin_date)
get_ptp_data(end_date, begin_date)
get_recover_amount(end_date, begin_date)
get_unfollowed(begin_date)
get_unfollowed_call(begin_date)
# ------------------------ Compute the remaining summary bomber metrics ----------------------
# Get new-case count and amount
def get_new_case_amount(begin_date, end_date):
all_case = (DispatchAppHistoryR
.select(fn.SUM(DispatchAppHistoryR.entry_late_fee_pending +
DispatchAppHistoryR.entry_principal_pending)
.alias('pending'),
DispatchAppHistoryR.bomber_id,
fn.COUNT(DispatchAppHistoryR.application).alias('cnt'))
.where(DispatchAppHistoryR.entry_at > begin_date,
DispatchAppHistoryR.entry_at < end_date,
DispatchAppHistoryR.partner_id.is_null(True))
.group_by(DispatchAppHistoryR.bomber_id))
for case in all_case:
SummaryBomber.update(
new_case_amount_sum=case.pending,
new_case_cnt=case.cnt
).where(
SummaryBomber.bomber_id == case.bomber_id,
SummaryBomber.time == begin_date
).execute()
return all_case
# Get KP-related data
def get_kp_cleared(begin_date, end_date):
auto_call_sql = """
SELECT
a.current_bomber_id, count( b.application_id )
FROM
(SELECT
current_bomber_id, principal_part, late_fee_part,
repay_at, application_id
FROM
bomber.repayment_log
WHERE
repay_at >= '%s'
AND repay_at < '%s'
GROUP BY 4, 5 ) a
LEFT JOIN (
SELECT
cycle, bomber_id, promised_amount, promised_date,
application_id, created_at
FROM
bomber.auto_call_actions
WHERE
created_at >= date_sub( '%s', INTERVAL 7 DAY )
AND created_at < '%s'
AND promised_date IS NOT NULL
) b ON a.current_bomber_id = b.bomber_id
AND a.application_id = b.application_id
AND date( a.repay_at ) <= date( b.promised_date )
AND date( a.repay_at ) >= date( b.created_at )
LEFT JOIN bomber.bomber c ON a.current_bomber_id = c.id
WHERE
b.promised_date >= '%s'
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date, begin_date)
auto_call_results = run_all_sql(auto_call_sql)
manual_sql = """
SELECT
a.current_bomber_id, count( b.application_id )
FROM
(SELECT
current_bomber_id, principal_part, late_fee_part,
repay_at, application_id, created_at
FROM
bomber.repayment_log
WHERE
repay_at >= '%s'
AND repay_at < '%s'
AND principal_part + late_fee_part > 0
GROUP BY 2, 5 ) a
LEFT JOIN (
SELECT
cycle, bomber_id, promised_amount, promised_date,
application_id, created_at
FROM
bomber.bombing_history
WHERE
created_at >= date_sub( '%s', INTERVAL 7 DAY )
AND created_at < '%s'
AND promised_date IS NOT NULL
) b ON a.current_bomber_id = b.bomber_id
AND a.application_id = b.application_id
AND date( a.repay_at ) <= date( b.promised_date )
AND date( a.repay_at ) >= date( b.created_at )
LEFT JOIN bomber.bomber c ON a.current_bomber_id = c.id
WHERE
b.promised_date >= '%s'
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date, begin_date)
manual_results = run_all_sql(manual_sql)
sql_result = auto_call_results + manual_results
result = {}
for data in sql_result:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
for key, value in result.items():
(SummaryBomber
.update(
KP_cleared_cnt=value
).where(
SummaryBomber.bomber_id == key,
SummaryBomber.time == begin_date)
).execute()
# Get cases in PTP on the day (the denominator of the KP rate)
def get_kp_today(begin_date, end_date):
sql = """
select bomber_id, count(distinct application_id)
from(
SELECT bomber_id, application_id
FROM bomber.auto_call_actions a
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where a.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))
UNION
SELECT bomber_id, application_id
FROM bomber.bombing_history b
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where b.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))) result
GROUP BY 1
""" % (begin_date, end_date, begin_date, begin_date, end_date, begin_date)
kp_today = run_all_sql(sql)
for kp in kp_today:
(SummaryBomber.update(
KP_today_cnt=kp[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == kp[0]
)).execute()
# Get PTP info (cases due today and cases due the next day)
def get_ptp_cnt(begin_date, end_date):
today_due = []
for sql_date in (begin_date, end_date):
sql = """
select bomber_id,count(distinct application_id) as cnt from
        ( # auto-call records, excluding cases whose PTP date was later changed
select application_id,bomber_id,created_at
from bomber.auto_call_actions ba
            where promised_date ='%s' # drop cases re-promised manually afterwards
and not exists ( select 1 from bomber.bombing_history bb
where bb.application_id = ba.application_id
and bb.bomber_id = ba.bomber_id
and bb.created_at>ba.created_at
and bb.promised_date is not null
and bb.created_at < '%s')
            union # manual history, excluding cases whose renewed PTP moved them off this day
select b.application_id,b.bomber_id,a.cdt
from bomber.bombing_history b
inner join (
select application_id,bomber_id,max(created_at) as cdt
from bomber.bombing_history bb
where bb.created_at>date_sub('%s',interval 7 day)
and bb.created_at<'%s'
and promised_date is not null
group by 1,2) a
on b.application_id=a.application_id
and b.bomber_id=a.bomber_id and a.cdt=b.created_at
where b.promised_date ='%s'
            union # PTPs made on the day for the same day
select b.application_id,b.bomber_id,b.created_at
from bomber.bombing_history b
where b.promised_date ='%s'
and b.created_at>'%s'
and b.created_at<date_add('%s',interval 1 day)
) a
where exists(select 1 from bomber.application ba
where ba.id=a.application_id
and ((ba.finished_at is null)
or (ba.finished_at > '%s')))
group by 1
""" % (sql_date, begin_date, begin_date, begin_date, sql_date,
sql_date, begin_date, begin_date, begin_date)
datas = run_all_sql(sql)
if sql_date == begin_date:
today_due = datas
for data in datas:
(SummaryBomber.update(
ptp_today_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0]
)).execute()
continue
nextday_due = datas
for data in datas:
(SummaryBomber.update(
ptp_next_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0]
)).execute()
return [today_due, nextday_due]
# Get PTP follow-up info
def get_ptp_call_cnt(begin_date, end_date):
today_followed = []
for sql_data in (begin_date, end_date):
sql = """
select b.bomber_id,count(distinct b.application_id) as cnt
from (
select a.* from
(
select application_id,bomber_id,created_at
from bomber.auto_call_actions ba
            where promised_date ='%s' # drop cases re-promised manually afterwards
and not exists (select 1 from bomber.bombing_history bb
where bb.application_id = ba.application_id
and bb.bomber_id = ba.bomber_id
and bb.created_at>ba.created_at
and bb.promised_date is not null
and bb.created_at < '%s')
            union # manual history, excluding cases whose renewed PTP moved them off this day
select b.application_id,b.bomber_id,a.cdt
from bomber.bombing_history b
inner join (
select application_id,bomber_id,max(created_at) as cdt
from bomber.bombing_history bb
where bb.created_at>date_sub('%s',interval 7 day)
and bb.created_at<'%s'
and promised_date is not null
group by 1,2) a
on b.application_id=a.application_id
and b.bomber_id=a.bomber_id and a.cdt=b.created_at
where b.promised_date ='%s'
            union # PTPs made on the day for the same day
select b.application_id,b.bomber_id,b.created_at
from bomber.bombing_history b
where b.promised_date ='%s'
and b.created_at>'%s'
and b.created_at<date_add('%s',interval 1 day)
) a
where exists(select 1 from bomber.application ba
where ba.id=a.application_id
and ((ba.finished_at is null)
or (ba.finished_at > '%s')))
and exists(select 1 from bomber.call_actions bc
where a.application_id = bc.application_id
and a.bomber_id = bc.bomber_id
and bc.created_at>'%s'
and bc.created_at< date_add('%s',interval 1 day)
and bc.created_at>=a.created_at)
union
select a.* from
(
select application_id,bomber_id,created_at
from bomber.auto_call_actions ba
            where promised_date ='%s' # drop cases re-promised manually afterwards
and not exists ( select 1 from bomber.bombing_history bb
where bb.application_id = ba.application_id
and bb.bomber_id = ba.bomber_id
and bb.created_at>ba.created_at
and bb.promised_date is not null
and bb.created_at < '%s')
            union # manual history, excluding cases whose renewed PTP moved them off this day
select b.application_id,b.bomber_id,a.cdt
from bomber.bombing_history b
inner join (
select application_id,bomber_id,max(created_at) as cdt
from bomber.bombing_history bb
where bb.created_at>date_sub('%s',interval 7 day)
and bb.created_at<'%s'
and promised_date is not null
group by 1,2) a
on b.application_id=a.application_id
and b.bomber_id=a.bomber_id and a.cdt=b.created_at
where b.promised_date ='%s'
            union # PTPs made on the day for the same day
select b.application_id,b.bomber_id,b.created_at
from bomber.bombing_history b
where b.promised_date ='%s'
and b.created_at>'%s'
and b.created_at<date_add('%s',interval 1 day)
) a
where exists(select 1 from bomber.application ba
where ba.id=a.application_id
and ba.finished_at > '%s'
and ba.finished_at< date_add('%s',interval 1 day))
) b
group by 1
""" % (sql_data, begin_date, begin_date, begin_date, sql_data,
sql_data, begin_date, begin_date, begin_date, begin_date,
begin_date, sql_data, begin_date, begin_date, begin_date,
sql_data, sql_data, begin_date, begin_date, begin_date,
begin_date)
datas = run_all_sql(sql)
if sql_data == begin_date:
today_followed = datas
for data in datas:
(SummaryBomber.update(
ptp_today_call_cnt=data[1]
).where(
SummaryBomber.bomber_id == data[0],
SummaryBomber.time == begin_date
)).execute()
continue
nextday_followed = datas
for data in datas:
(SummaryBomber.update(
ptp_next_call_cnt=data[1]
).where(
SummaryBomber.bomber_id == data[0],
SummaryBomber.time == begin_date
)).execute()
return [today_followed, nextday_followed]
# Get new-case repayment amount (only C1B, C2 and C3 have this notion)
def get_new_case_cleared(begin_date, end_date):
sql = """
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c2_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c3_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
) a
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date,begin_date, end_date)
case_cleared_sums = run_all_sql(sql)
for clear in case_cleared_sums:
(SummaryBomber.update(
new_case_cleared_sum=clear[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == clear[0]
)).execute()
# Number of new cases followed up on their entry day
@time_logger
def get_new_case_call(begin_date, end_date, real_query_time=False):
sql = """
SELECT
bd.bomber_id,
count( DISTINCT bd.application_id )
FROM
bomber.dispatch_app_history bd
INNER JOIN bomber.call_actions bc
ON bd.application_id = bc.application_id
AND bd.bomber_id = bc.bomber_id
AND date( bd.entry_at ) = date( bc.created_at )
WHERE
entry_at > '%s'
AND entry_at < '%s'
AND partner_id IS NULL
GROUP BY 1
""" % (begin_date, end_date)
new_case_calls = run_all_sql(sql)
if real_query_time:
return new_case_calls
for call in new_case_calls:
(SummaryBomber.update(
new_case_call_cnt=call[1]
).where(
SummaryBomber.bomber_id == call[0],
SummaryBomber.time == begin_date
)).execute()
return new_case_calls
# Get average talk duration per connected case
@time_logger
def get_calltime_avg(begin_date, end_date, real_query_time=False):
autos_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction,
count( 1 ) AS auto_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date)
autos = run_all_sql(autos_sql)
manual_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
AND an.recording is not null
GROUP BY 1
""" % (begin_date, end_date, '5%', '3%')
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]][0] += data[1]
result[data[0]][1] += data[2]
continue
result[data[0]] = [data[1], data[2]]
if real_query_time:
return result
for key, value in result.items():
(SummaryBomber.update(
calltime_case_sum=value[0],
calltime_case_cnt=value[1],
calltime_case_avg=value[0] / value[1] if value[1] else 0
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
return result
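# Example of the aggregation above (hypothetical rows): auto (7, 120, 3) plus
# manual (7, 60, 1) merge into result[7] == [180, 4], so calltime_case_avg is
# written as 180 / 4 == 45; the `if value[1]` guard avoids dividing by zero
# when a bomber had no connected calls.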
# Get waiting-time data (talk time not attributed to a case)
def get_no_calltime_avg(begin_date, end_date):
manual_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND (an.status!='ANSWERED' or an.recording is null)
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date, '5%', '3%')
manuals = run_all_sql(manual_sql)
for data in manuals:
(SummaryBomber.update(
calltime_no_case_sum=data[1],
calltime_no_case_cnt=data[2],
calltime_no_case_avg=data[1] / data[2] if data[2] else 0
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0]
)).execute()
# Get total talk duration
@time_logger
def get_calltime_sum(begin_date, end_date, real_query_time=False):
autos_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date)
autos = run_all_sql(autos_sql)
manual_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date, '5%', '3%')
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
if real_query_time:
return result
for key, value in result.items():
(SummaryBomber.update(
calltime_sum=value
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
return result
# Cases not followed up that day
def get_unfollowed(begin_date):
sql = """
SELECT
bomber_id,
count(1)
FROM
(
SELECT
bd.application_id,
date(bd.entry_at) AS entry_at,
bd.bomber_id,
date(bd.out_at) AS out_at
FROM
bomber.dispatch_app_history bd
WHERE
(
out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)
OR out_at IS NULL
)
AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)
AND partner_id IS NULL
AND NOT EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
bd.bomber_id = bc.bomber_id
AND bc.application_id = bd.application_id
AND bc.created_at < '%(begin_date)s'
)
) a
GROUP BY
1
""" % {'begin_date': begin_date}
data = run_all_sql(sql)
result = defaultdict(int)
for d in data:
result[d[0]] += d[1]
bomber_list = []
for key, value in result.items():
bomber_list.append(key)
(SummaryBomber.update(
unfollowed_cnt=SummaryBomber.new_case_cnt + value
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
    # Remaining bomber_ids take new_case_cnt directly
(SummaryBomber.update(
unfollowed_cnt=SummaryBomber.new_case_cnt
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id.not_in(bomber_list)
)).execute()
# Among unfollowed cases, the count followed up that day
def get_unfollowed_call(begin_date):
sql = """
SELECT
bomber_id,
count(1)
FROM
(
SELECT
bd.application_id,
date(bd.entry_at) AS entry_at,
bd.bomber_id,
date(bd.out_at) AS out_at
FROM
bomber.dispatch_app_history bd
WHERE
(
out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)
OR out_at IS NULL
)
AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)
AND partner_id IS NULL
AND NOT EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
bd.bomber_id = bc.bomber_id
AND bc.application_id = bd.application_id
AND bc.created_at < '%(begin_date)s'
)
) a
WHERE
EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
a.application_id = bc.application_id
AND a.bomber_id = bc.bomber_id
AND bc.created_at > '%(begin_date)s'
AND bc.created_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND bc.created_at >= a.entry_at
)
OR EXISTS (
SELECT
1
FROM
bomber.application ba
WHERE
ba.id = a.application_id
AND ba.finished_at > '%(begin_date)s'
AND ba.finished_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
)
GROUP BY
1
""" % {'begin_date': begin_date}
data = run_all_sql(sql)
result = defaultdict(int)
for d in data:
result[d[0]] += d[1]
bomber_list = []
for key, value in result.items():
bomber_list.append(key)
(SummaryBomber.update(
unfollowed_call_cnt=SummaryBomber.new_case_call_cnt + value
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
    # Remaining bomber_ids take new_case_call_cnt directly
update_sql = (SummaryBomber
.update(unfollowed_call_cnt=SummaryBomber.new_case_call_cnt)
.where(SummaryBomber.time == begin_date))
if bomber_list:
update_sql = update_sql.where(SummaryBomber.bomber_id
.not_in(bomber_list))
update_sql.execute()
return result
# Update summary with new data (the other part of summary_bomber)
@action(MessageAction.UPDATE_SUMMARY_NEW)
def update_summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= end_date,
WorkerLog.action == 'UPDATE_SUMMARY_NEW')
.first())
if worker_log and worker_log.logs >= 5:
return
get_new_case_amount(begin_date, end_date)
get_kp_cleared(begin_date, end_date)
get_kp_today(begin_date, end_date)
get_ptp_cnt(begin_date, end_date)
get_ptp_call_cnt(begin_date, end_date)
get_new_case_cleared(begin_date, end_date)
get_new_case_call(begin_date, end_date)
get_calltime_avg(begin_date, end_date)
get_no_calltime_avg(begin_date, end_date)
get_calltime_sum(begin_date, end_date)
# ------------------------------- Cycle-level data ---------------------------
def get_cycle_claimed(begin_date, end_date):
sql = """
select cycle,count(1)
from bomber.application where cycle in (1,2,3,4)
and (finished_at is null or (finished_at>'%s'))
and created_at>'2018-09-01'
group by 1
""" % begin_date
result = run_all_sql(sql)
return result
# Get cycle-level new-case count and amount
@time_logger
def cycle_new_case(begin_date, end_date, real_time_query=False):
sql = """
SELECT
1 AS cycle,
count( ba.id ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.created_at ) = bo.which_day_overdue
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c1b_entry ) = bo.which_day_overdue
WHERE
c1b_entry > '%s'
AND c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c2_entry ) = bo.which_day_overdue
WHERE
c2_entry > '%s'
AND c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c3_entry ) = bo.which_day_overdue
WHERE
c3_entry > '%s'
AND c3_entry < '%s'
""" % (begin_date, end_date, begin_date, end_date,
begin_date, end_date, begin_date, end_date)
all_datas = run_all_sql(sql)
if real_time_query:
return all_datas
for data in all_datas:
(SummaryBomber.update(
new_case_amount_sum=data[2],
new_case_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0],
SummaryBomber.cycle == data[0]
)).execute()
return all_datas
# Number of new cases followed up on their entry day (cycle level)
@time_logger
def get_cycle_new_case_call(begin_date, end_date, real_time_query=False):
sql = """
SELECT
1 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.created_at ) = date( bc.created_at )
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c1b_entry ) = date( bc.created_at )
WHERE
ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c2_entry ) = date( bc.created_at )
WHERE
ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c3_entry ) = date( bc.created_at )
WHERE
ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
""" % (begin_date, end_date, begin_date, end_date,
begin_date, end_date, begin_date, end_date)
cycle_datas = run_all_sql(sql)
if real_time_query:
return cycle_datas
for data in cycle_datas:
(SummaryBomber.update(
new_case_call_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.cycle == data[0],
SummaryBomber.bomber_id == data[0]
)).execute()
return cycle_datas
def get_cycle_new_case_cleared(begin_date, end_date):
sql = """
SELECT
'1' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.created_at ) = date( br.repay_at )
WHERE ba.created_at > '%s'
AND ba.created_at < '%s'
GROUP BY 1, 2 ) a
UNION
SELECT
'2' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
GROUP BY 1, 2) a
""" % (begin_date, end_date, begin_date, end_date)
cycle_cleared = run_all_sql(sql)
for i in cycle_cleared:
(SummaryBomber.update(
new_case_cleared_sum=i[2]
).where(
SummaryBomber.cycle == i[0],
SummaryBomber.bomber_id == i[0],
SummaryBomber.time == begin_date
)).execute()
def get_cycle_case_made_cnt(begin_date, end_date):
sql = """
select cycle,count(distinct application) from (
select distinct cycle,application from bomber.auto_call_list_record
where created_at >= '%s'
and created_at < '%s'
and called_counts <> 0
and cycle in (1,2,3,4)
union
select distinct cycle,application_id from bomber.call_actions
where created_at >= '%s'
and created_at < '%s'
and cycle in (1,2,3,4)
) c
group by 1
""" % (begin_date, end_date, begin_date, end_date)
case_made_datas = run_all_sql(sql)
for case_made_data in case_made_datas:
(SummaryBomber.update(
case_made_cnt=case_made_data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.cycle == case_made_data[0],
SummaryBomber.bomber_id == case_made_data[0]
)).execute()
# Get cycle-dimension data
@action(MessageAction.SUMMARY_NEW_CYCLE)
def summary_new_cycle(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= end_date,
WorkerLog.action == 'SUMMARY_NEW_CYCLE')
.first())
if worker_log and worker_log.logs >= 5:
return
cycle_datas = (SummaryBomber
.select(fn.SUM(SummaryBomber.new_case_amount_sum)
.alias('new_case_amount_sum'),
fn.SUM(SummaryBomber.new_case_cleared_sum)
.alias('new_case_cleared_sum'),
fn.SUM(SummaryBomber.case_made_cnt)
.alias('case_made_cnt'),
fn.SUM(SummaryBomber.case_connect_cnt)
.alias('case_connect_cnt'),
fn.SUM(SummaryBomber.promised_cnt)
.alias('promised_cnt'),
fn.SUM(SummaryBomber.promised_amount)
.alias('promised_amount'),
fn.SUM(SummaryBomber.cleared_cnt)
.alias('cleared_cnt'),
fn.SUM(SummaryBomber.cleared_amount)
.alias('cleared_amount'),
fn.SUM(SummaryBomber.new_case_cnt)
.alias('new_case_cnt'),
fn.SUM(SummaryBomber.new_case_call_cnt)
.alias('new_case_call_cnt'),
fn.SUM(SummaryBomber.unfollowed_cnt)
.alias('unfollowed_cnt'),
fn.SUM(SummaryBomber.unfollowed_call_cnt)
.alias('unfollowed_call_cnt'),
fn.SUM(SummaryBomber.call_cnt).alias('call_cnt'),
fn.SUM(SummaryBomber.sms_cnt).alias('sms_cnt'),
fn.SUM(SummaryBomber.call_connect_cnt)
.alias('call_connect_cnt'),
fn.SUM(SummaryBomber.ptp_today_cnt)
.alias('ptp_today_cnt'),
fn.SUM(SummaryBomber.ptp_today_call_cnt)
.alias('ptp_today_call_cnt'),
fn.SUM(SummaryBomber.ptp_next_cnt)
.alias('ptp_next_cnt'),
fn.SUM(SummaryBomber.ptp_next_call_cnt)
.alias('ptp_next_call_cnt'),
fn.SUM(SummaryBomber.KP_cleared_cnt)
.alias('KP_cleared_cnt'),
fn.SUM(SummaryBomber.KP_today_cnt)
.alias('KP_today_cnt'),
fn.SUM(SummaryBomber.work_ind).alias('work_ind'),
fn.SUM(SummaryBomber.calltime_sum)
.alias('calltime_sum'),
fn.SUM(SummaryBomber.calltime_case_sum)
.alias('calltime_case_sum'),
fn.SUM(SummaryBomber.calltime_case_cnt)
.alias('calltime_case_cnt'),
fn.SUM(SummaryBomber.calltime_no_case_sum)
.alias('calltime_no_case_sum'),
fn.SUM(SummaryBomber.calltime_no_case_cnt)
.alias('calltime_no_case_cnt'),
                           fn.SUM(SummaryBomber.work_time_sum)
                           .alias('work_time_sum'),
                           SummaryBomber.cycle.alias('cycle'))
.where(SummaryBomber.time == begin_date,
SummaryBomber.cycle << Cycle.values())
.group_by(SummaryBomber.cycle))
for cycle_data in cycle_datas:
SummaryBomber.create(
bomber_id=cycle_data.cycle,
time=begin_date,
cycle=cycle_data.cycle,
            new_case_amount_sum=cycle_data.new_case_amount_sum,  # new-case amount (as above)
            new_case_cleared_sum=cycle_data.new_case_cleared_sum,  # new-case repayment (as above)
            new_case_cleard_rate=0,
            case_made_cnt=cycle_data.case_made_cnt,  # cases called
            case_made_rate=0,
            case_connect_cnt=cycle_data.case_connect_cnt,  # cases connected
            case_connect_rate=0,
            promised_cnt=cycle_data.promised_cnt,  # PTP case count
            promised_amount=cycle_data.promised_amount,  # PTP amount
            cleared_cnt=cycle_data.cleared_cnt,  # repaid case count
            cleared_amount=cycle_data.cleared_amount,  # repaid amount
            new_case_cnt=cycle_data.new_case_cnt,  # new-case count (cycles 1, 2 computed later)
            new_case_call_cnt=cycle_data.new_case_call_cnt,  # new-case calls (as above)
            unfollowed_cnt=cycle_data.unfollowed_cnt,
            unfollowed_call_cnt=cycle_data.unfollowed_call_cnt,
            call_cnt=cycle_data.call_cnt,  # calls made
            sms_cnt=cycle_data.sms_cnt,  # SMS sent
            call_connect_cnt=cycle_data.call_connect_cnt,  # calls connected
            calltime_case_avg=0,  # avg talk time per connected case (all computed later)
            ptp_today_cnt=cycle_data.ptp_today_cnt,  # PTPs due today
            ptp_today_call_cnt=cycle_data.ptp_today_call_cnt,  # PTPs due today that were followed up
            ptp_next_cnt=cycle_data.ptp_next_cnt,  # PTPs due the next day
            ptp_next_call_cnt=cycle_data.ptp_next_call_cnt,  # next-day PTPs followed up
            KP_cleared_cnt=cycle_data.KP_cleared_cnt,  # KP repaid cases
            KP_today_cnt=cycle_data.KP_today_cnt,  # cases in PTP today
            KP_cleared_rate=0,
            work_ind=cycle_data.work_ind,  # whether they worked that day
            calltime_sum=cycle_data.calltime_sum,  # total talk time
            calltime_case_sum=cycle_data.calltime_case_sum,
            calltime_case_cnt=cycle_data.calltime_case_cnt,
            calltime_no_case_sum=cycle_data.calltime_no_case_sum,
            calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,
            work_time_sum=cycle_data.work_time_sum  # working hours
)
cycle_claimed = get_cycle_claimed(begin_date, end_date)
for claimed in cycle_claimed:
(SummaryBomber.update(
claimed_cnt=claimed[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.cycle == claimed[0],
SummaryBomber.bomber_id == claimed[0]
)).execute()
    # Get new-case count and amount
cycle_new_case(begin_date, end_date)
    # Get the new-case follow-up count
get_cycle_new_case_call(begin_date, end_date)
    # Get the new-case repayment amount
get_cycle_new_case_cleared(begin_date, end_date)
    # Correct the cycle-level cases-called count (predictive auto-dial calls all count as made)
get_cycle_case_made_cnt(begin_date, end_date)
    # Compute the derived metrics (the various rates)
all_datas = (SummaryBomber.filter(SummaryBomber.time == begin_date))
for data in all_datas:
cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum
if data.new_case_amount_sum else 0) * 100
data.new_case_cleard_rate = cl_rat
case_made_rate = (data.case_made_cnt / data.claimed_cnt
if data.claimed_cnt else 0) * 100
data.case_made_rate = case_made_rate
case_connect_rate = (data.case_connect_cnt / data.case_made_cnt
if data.case_made_cnt else 0) * 100
data.case_connect_rate = case_connect_rate
calltime_case_avg = (data.calltime_case_sum / data.calltime_case_cnt
if data.calltime_case_cnt else 0)
data.calltime_case_avg = calltime_case_avg
calltime_no_case_avg = (data.calltime_no_case_sum /
data.calltime_no_case_cnt
if data.calltime_no_case_cnt else 0)
data.calltime_no_case_avg = calltime_no_case_avg
KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt
if data.KP_today_cnt else 0) * 100
data.KP_cleared_rate = KP_cleared_rate
data.save()
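# The derived rates above, written out (all percentages, divide-by-zero guarded):
#   new_case_cleard_rate = new_case_cleared_sum / new_case_amount_sum * 100
#   case_made_rate       = case_made_cnt / claimed_cnt * 100
#   case_connect_rate    = case_connect_cnt / case_made_cnt * 100
#   KP_cleared_rate      = KP_cleared_cnt / KP_today_cnt * 100
# calltime_case_avg and calltime_no_case_avg are plain sums over counts.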
@action(MessageAction.MODIFY_BILL)
def modify_bill(payload, msg_id):
application_id = payload.get('external_id')
principal_paid = Decimal(payload.get('principal_paid', 0))
late_fee = Decimal(payload.get('late_fee', 0))
late_fee_paid = Decimal(payload.get('late_fee_paid', 0))
overdue_days = payload.get('overdue_days')
sub_bill_id = payload.get('bill_sub_id')
partner_bill_id = payload.get('partner_bill_id')
if not application_id:
logging.warning('payload has no external_id. {}'.format(str(payload)))
return
if not overdue_days:
logging.info("application %s not overdue" % application_id)
return
item = (OldLoanApplication
.get_or_none(OldLoanApplication.application_id ==
application_id))
if item:
start_old_application(item, cancel=True)
overdue_bill = (OverdueBill.select()
.where(OverdueBill.external_id == application_id,
OverdueBill.sub_bill_id == sub_bill_id)
.first())
application = (Application.filter(Application.id == application_id)
.first())
if not overdue_bill:
if not application:
            logging.info('application %s not in bomber, sending it to bomber now',
                         application_id)
send_to_default_q(MessageAction.APPLICATION_BOMBER, {
'id': application_id,
'bill_sub_id': sub_bill_id
})
return
else:
application = (Application
.filter(Application.id == overdue_bill.collection_id)
.first())
with db.atomic():
application.status = ApplicationStatus.UNCLAIMED.value
application.finished_at = None
application.paid_at = None
application.save()
if overdue_bill:
overdue_bill.status = ApplicationStatus.UNCLAIMED.value
overdue_bill.finished_at = None
overdue_bill.save()
        repayment = (RepaymentLog.update(no_active=1)
                     .where(RepaymentLog.application == application.id,
                            RepaymentLog.partner_bill_id == partner_bill_id,
                            RepaymentLog.overdue_bill_id == overdue_bill.id))
else:
repayment = (RepaymentLog.update(no_active=1)
.where(RepaymentLog.application == application.id,
RepaymentLog.partner_bill_id == partner_bill_id))
repayment_num = repayment.execute()
logging.info("modify_bill no active repayment count:%s" % repayment_num)
if not application.latest_bomber_id:
return
bomber_id = application.latest_bomber_id
(DispatchAppHistory.update(
out_at=None,
out_overdue_days=overdue_days,
out_principal_pending=(application.amount - principal_paid),
out_late_fee_pending=(late_fee - late_fee_paid)
).where(
DispatchAppHistory.application == application.id,
DispatchAppHistory.bomber_id == bomber_id)).execute()
# Get the ids of changed bombers
def get_change_bomber():
cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value, 8: Cycle.C3.value}
result = {}
bomber_logs = (BomberLog.select(BomberLog.bomber_id,
BomberLog.role_id,
BomberLog.operation,
Bomber.group_id)
.join(Bomber, JOIN_INNER,
on=BomberLog.bomber_id == Bomber.id)
.where(fn.DATE(BomberLog.created_at) == date.today(),
                          BomberLog.role_id << list(cycle_role_map.keys()),  # C1B, C2, C3
                          BomberLog.operation << (0, 1),  # 0 delete, 1 create, 3 modify
                          Bomber.instalment == 0)  # single-period (non-instalment) collectors
.dicts())
for b_log in bomber_logs:
cycle = cycle_role_map.get(b_log["role_id"])
group_id = b_log["group_id"]
if cycle in result:
if group_id not in result[cycle]:
result[cycle][group_id] = {
"cycle": cycle,
"del_ids": [],
"new_ids": []
}
else:
result[cycle] = {group_id: {
"cycle": cycle,
"del_ids": [],
"new_ids": []}
}
if b_log["operation"] == 0:
result[cycle][group_id]["del_ids"].append(b_log["bomber_id"])
    # A non-empty result means staffing changed
if result:
bombers = (Bomber.select()
.where(Bomber.role.in_(list(cycle_role_map.keys())),
Bomber.is_del == 0,
Bomber.instalment == 0))
for b in bombers:
cycle_result = result.get(cycle_role_map[b.role_id], {})
role_result = cycle_result.get(b.group_id)
if not role_result:
continue
role_result["new_ids"].append(b.id)
        result_list = []
        for cycle, group_dict in result.items():
            result_list.extend(list(group_dict.values()))
        return result_list
return []
# Get all the applications
def get_total_application(cycle, del_ids, new_ids,
type=ApplicationType.CASH_LOAN.value):
bomber_list = del_ids + new_ids
all_apps = (Application.select(Application.id,
Application.latest_bomber_id.alias(
"latest_bomber_id"),
Application.promised_date,
Bomber.partner_id.alias("partner_id"))
.join(Bomber, JOIN_LEFT_OUTER,
Application.latest_bomber == Bomber.id)
.where(Application.cycle == cycle,
Application.status != ApplicationStatus.REPAID.value,
Application.latest_bomber_id << bomber_list,
Application.type == type)
.order_by(Application.id)
.dicts())
return all_apps
# Build the per-bomber allocation list (average case count per bomber)
def get_average_number(app_nums, bomber_nums):
average = app_nums // bomber_nums
remainder = app_nums % bomber_nums
average_list = [average for i in range(bomber_nums)]
if remainder == 0:
return average_list
for i in range(remainder):
average_list[i] += 1
    # Shuffle so the first few people don't always get the extra cases
random.shuffle(average_list)
return average_list
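# Usage sketch (hypothetical numbers): get_average_number(10, 3) builds
# [3, 3, 3], bumps the first remainder-many entries to get [4, 3, 3], then
# shuffles, so one possible return value is [3, 4, 3]; the list always sums
# to app_nums.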
# Group and tally the applications
def classified_statistic_apps(apps):
result = {}
    # Group the data by each case's bomber_id
for app in apps:
        # Split each collector's cases into PTP and non-PTP
latest_bomber_id = app["latest_bomber_id"]
if latest_bomber_id not in result:
result[latest_bomber_id] = {
"bid":latest_bomber_id,
"p_list": [],
"np_list": [],
"partner_id": app["partner_id"] if app["partner_id"] else "",
}
promised_date = app.get("promised_date")
if not promised_date or promised_date.date() < date.today():
result[latest_bomber_id]['np_list'].append(app["id"])
else:
result[latest_bomber_id]['p_list'].append(app["id"])
return result
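# Shape of the returned dict, with hypothetical values:
# {12: {"bid": 12, "p_list": [1001], "np_list": [1002, 1003], "partner_id": 5}}
# Cases whose promised_date is today or later land in p_list; everything else,
# including cases with no promised_date at all, lands in np_list.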
# Collect the surplus cases and work out how many each person needs
def get_surplus_application(new_ids, del_ids, average_nums, classified_apps):
surplus_apps = []
    # If an id is in the deletion list, reassign all of its cases
for del_id in del_ids:
del_res = classified_apps.get(del_id,{})
p_list = del_res.get("p_list", [])
np_list = del_res.get("np_list", [])
del_res["need_num"] = -(len(p_list) + len(np_list))
del_res["to_list"] = np_list + p_list
surplus_apps.extend(p_list)
surplus_apps.extend(np_list)
    # Count each collector's PTP and non-PTP cases, and how many more they need
for index, bid in enumerate(new_ids):
average = average_nums[index]
bomber_app = classified_apps.get(bid)
if not bomber_app:
            # Look up the partner_id
bomber = (Bomber.select(Bomber.partner_id)
.where(Bomber.id == bid)
.first())
bomber_app = {
"bid": bid,
"p_list": [],
"p_num": 0,
"np_list": [],
"np_num": 0,
"need_num": average,
"partner_id": bomber.partner_id if bomber else ''
}
classified_apps[bid] = bomber_app
else:
p_num = len(bomber_app["p_list"])
np_num = len(bomber_app["np_list"])
            # If PTP cases exceed the average, push all remaining non-PTP cases to the surplus list
if p_num > average:
bomber_app["need_num"] = - np_num
else:
bomber_app["need_num"] = average - (p_num + np_num)
bomber_app["p_num"] = p_num
bomber_app["np_num"] = np_num
        # Move the excess cases to the surplus list
if bomber_app["need_num"] < 0:
            # Shuffle so overdue days are spread evenly across the split
random.shuffle(bomber_app["np_list"])
res_over = bomber_app["np_list"][:-bomber_app["need_num"]]
bomber_app["to_list"] = res_over
surplus_apps.extend(res_over)
    # Sort by need_num
classified_apps_list = sorted(classified_apps.values(),
key=lambda x:x["need_num"],
reverse=True)
return surplus_apps, classified_apps_list
# Update the database and dispatch the cases
def update_applications(surplus_apps, classified_apps, cycle):
    # Shuffle the surplus cases
random.shuffle(surplus_apps)
for app in classified_apps:
status = 0
try:
if app["need_num"] > 0:
from_list = surplus_apps[:app["need_num"]]
                # Remove the taken elements from surplus_apps
                for i in from_list:
                    surplus_apps.remove(i)
app["from_list"] = from_list
with db.atomic():
q = Application.update(
{Application.latest_bomber_id: app["bid"]}).where(
Application.id.in_(from_list))
q.execute()
                    # Record case entry for the dispatch
in_record_params = {
"dest_bomber_id": app["bid"],
"application_ids": from_list,
"dest_partner_id": app["partner_id"],
"cycle": cycle,
}
new_in_record(**in_record_params)
status = 1
elif app["need_num"] < 0:
                #Record case exit for the dispatch
out_record_params = {
"src_bomber_id": app["bid"],
"application_ids": app["to_list"]
}
new_out_record(**out_record_params)
status = 1
else:
status = 1
except Exception as e:
logging.error("分件异常,params:%s,error:%s"%(app,str(e)))
#记录操作日志
log_params = {
"bomber_id": app["bid"],
"form_ids": json.dumps(app.get("from_list", [])),
"to_ids": json.dumps(app.get("to_list", [])),
"need_num": app.get("need_num"),
"np_ids": json.dumps(app.get("np_list", [])),
"p_ids": json.dumps(app.get("p_list", [])),
"status": status
}
DispatchAppLogs.create(**log_params)
return classified_apps
# Staff changes: dispatch instalment collection cases
def get_instalment_change_bomber():
    result = {}
bomber_logs = (BomberLog.select(BomberLog.bomber_id,
BomberLog.operation,
Bomber.instalment,
Bomber.group_id)
.join(Bomber, JOIN_INNER,
on=BomberLog.bomber_id == Bomber.id)
.where(fn.DATE(BomberLog.created_at) == date.today(),
BomberLog.operation << [0,1],
Bomber.instalment > 0)
.dicts())
for bl in bomber_logs:
cycle = bl["instalment"]
group_id = bl["group_id"]
if cycle not in result:
result[cycle] = {group_id: {
"cycle": cycle,
"del_ids": [],
"new_ids": []
}}
else:
if group_id not in result[cycle]:
result[cycle][group_id] = {
"cycle": cycle,
"del_ids": [],
"new_ids": []}
if bl["operation"] == 0:
result[cycle][group_id]["del_ids"].append(bl["bomber_id"])
if result:
instalments = list(result.keys())
bombers = (Bomber.select()
.where(Bomber.instalment << instalments,
Bomber.is_del == 0))
for b in bombers:
cycle_result = result.get(b.instalment, {})
group_result = cycle_result.get(b.group_id)
if not group_result:
continue
group_result["new_ids"].append(b.id)
result_list = []
for cycle,group_dict in result.items():
result_list.extend(list(group_dict.values()))
return result_list
return []
def instalment_update_applications(surplus_apps, classified_apps, cycle):
end = 0
for app in classified_apps:
if app["need_num"] <= 0:
continue
start = end
end = start + app["need_num"]
aids = surplus_apps[start:end]
app["from_list"] = aids
status = 0
with db.atomic():
            q = (Application.update(last_bomber=Application.latest_bomber,
                                    latest_bomber=app["bid"],
                                    ptp_bomber=None)
                 .where(Application.id << aids)
                 .execute())
            # Entry and exit records
record_param = {
"cycle": cycle,
"application_ids": aids,
"dest_bomber_id": app["bid"],
"dest_partner_id": app["partner_id"],
}
out_and_in_record_instalment(**record_param)
status = 1
        # Record the operation log
log_params = {
"bomber_id": app["bid"],
"form_ids": json.dumps(app.get("from_list", [])),
"to_ids": json.dumps(app.get("to_list", [])),
"need_num": app.get("need_num"),
"np_ids": json.dumps(app.get("np_list", [])),
"p_ids": json.dumps(app.get("p_list", [])),
"status": status
}
DispatchAppLogs.create(**log_params)
return classified_apps
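# Sketch of the slicing above (hypothetical sizes): with ten surplus apps and
# two bombers needing 6 and 4, the first gets surplus_apps[0:6] and the second
# surplus_apps[6:10]; `end` carries over between loop iterations so the slices
# never overlap.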
# Execute the case dispatch triggered by staff changes
def change_bomber_dispatch_apps(change_bombers,
type=ApplicationType.CASH_LOAN.value):
if not change_bombers:
return
for bombers in change_bombers:
del_ids = bombers.get("del_ids", [])
new_ids = bombers.get("new_ids", [])
cycle = bombers.get("cycle")
if not all([new_ids, cycle]):
            logging.info(
                "invalid dispatch info, bomber:%s, type:%s" % (bombers, type))
continue
        # Get all the apps
apps = get_total_application(cycle, del_ids, new_ids, type)
if not apps:
            logging.info(
                "dispatch found no matching cases, bomber:%s, type:%s"
                % (bombers, type))
continue
        # Build the allocation list
average_nums = get_average_number(len(apps), len(new_ids))
        # Group and tally the apps
classified_apps = classified_statistic_apps(apps)
        # Compute each person's needed cases and the surplus
        surplus_apps, classified_apps = get_surplus_application(new_ids,
                                                                del_ids,
                                                                average_nums,
                                                                classified_apps)
        # Dispatch the cases and update the database
        result = None
        if type == ApplicationType.CASH_LOAN.value:
            result = update_applications(surplus_apps, classified_apps, cycle)
        elif type == ApplicationType.CASH_LOAN_STAGING.value:
            result = instalment_update_applications(surplus_apps,
                                                    classified_apps,
                                                    cycle)
        else:
            logging.info("staff-change dispatch, unknown type:%s" % type)
        logging.info("staff-change dispatch result:%s, type:%s" % (result, type))
# Bomber staffing changed: dispatch cases
@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)
def bomber_dispatch_applications(payload, msg_id):
    # Detect staffing changes from today's bomber_log entries; deletions are recorded there
change_bombers = get_change_bomber()
instalment_change_bombers = get_instalment_change_bomber()
params = {ApplicationType.CASH_LOAN.value: change_bombers,
ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}
    for type, bombers in params.items():
        change_bomber_dispatch_apps(change_bombers=bombers, type=type)
@action(MessageAction.REPAIR_BOMBER)
def repair_bomber(payload, msg_id):
app_mobile = payload['app_mobile']
username = payload.get('user_name')
logging.info('start repair bomber, number: %s' % app_mobile)
    # Take the EC the user filled in, check whether the number is in collection,
    # and store the relationship
if 'mobile_no' in payload and payload['mobile_no']:
mobile = number_strip(str(payload['mobile_no']))[:64]
name = payload.get('mobile_name')
application = Application.filter(Application.user_mobile_no == mobile)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, mobile, username, name)
if 'tel_no' in payload and payload['tel_no']:
tel_no = number_strip(str(payload['tel_no']))[:64]
name = payload.get('tel_name')
application = Application.filter(Application.user_mobile_no == tel_no)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, tel_no, username, name)
def repair_contact(number, application, name):
    # If the filled-in EC has been overdue, add the number to contacts
application = application.first()
contact = (Contact
.filter(Contact.user_id == application.user_id,
Contact.number == number))
if not contact.exists():
Contact.create(
user_id=application.user_id,
name=name,
number=number,
relationship=Relationship.FAMILY.value,
source='repair ec',
real_relationship=Relationship.FAMILY.value
)
logging.info('add repair contact success, number: %s' % number)
def add_relationship(number, ec_number, username, name):
    # Store the relationship
query = (TotalContact
.objects(src_number=str(number),
dest_number=ec_number,
source=20,
is_calc=False
)
.first())
if not query:
TotalContact(
src_number=str(number),
src_name=username,
dest_number=ec_number,
dest_name=name,
source=20).save()
logging.info('add relationship success, number: %s' % number)
# Get the time window to summarize
def get_summary_daily_time():
mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')
mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')
now_date = datetime.now()
now_date_time = now_date.time()
today_str = str(now_date.date())
if now_date_time < mid_time_t1.time():
yes_date = now_date - timedelta(days=1)
yes_date_str = str(yes_date.date())
begin_str = yes_date_str + ' 17:20:00'
end_str = today_str + ' 00:00:00'
elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():
begin_str = today_str + ' 00:00:00'
end_str = today_str + ' 12:40:00'
else:
begin_str = today_str + ' 12:40:00'
end_str = today_str + ' 17:20:00'
begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')
end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')
    # Record which day the stats belong to
summary_datetime = now_date-timedelta(minutes=30)
summary_date = summary_datetime.date()
return begin_time, end_time, summary_date
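# The three windows this returns, e.g. for a run on 2019-01-31 (hypothetical):
#   before 12:40   -> [2019-01-30 17:20:00, 2019-01-31 00:00:00)
#   12:40 to 17:20 -> [2019-01-31 00:00:00, 2019-01-31 12:40:00)
#   after 17:20    -> [2019-01-31 12:40:00, 2019-01-31 17:20:00)
# summary_date is taken 30 minutes back so the run just after midnight still
# attributes its window to the previous day.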
# Update the day's data at 12:40, at 17:20 and after midnight
@action(MessageAction.SUMMARY_DAILY)
def summary_daily_data(payload, msg_id):
begin_time, end_time, summary_date = get_summary_daily_time()
call_actions = (CallActionsR.select(CallActionsR.id,
CallActionsR.bomber_id,
CallActionsR.application_id,
CallActionsR.promised_date,
CallActionsR.cycle,
CallActionsR.name,
CallActionsR.number)
.where(CallActionsR.created_at >= begin_time,
CallActionsR.created_at < end_time,
CallActionsR.type << (0,1)))
summary_dailys = {}
for call in call_actions:
if call.bomber_id not in summary_dailys:
summary_dailys[call.bomber_id] = {'ptp_cnt': 0,
'call_cnt': 0,
'cycle': call.cycle,
'repayment': 0,
'bomber_id': call.bomber_id,
'summary_date':str(summary_date)}
        # C2/C3 PTP cases produce an extra row with no number or name
if call.name and call.number:
summary_dailys[call.bomber_id]['call_cnt'] += 1
if call.promised_date:
summary_dailys[call.bomber_id]['ptp_cnt'] += 1
    # Get repayment info
C1_sql = """
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,a.cycle
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,2,4,5)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
""" % (begin_time, end_time)
C1_repayment = run_all_sql(C1_sql)
other_sql = """
select current_bomber_id,sum(pay_amount) as pay_amount,cycle
from (
select application_id,current_bomber_id,pay_amount,repay_at,cycle
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at,
br.cycle
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (3,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
""" % (begin_time, end_time)
other_repayment = run_all_sql(other_sql)
all_repayment = C1_repayment + other_repayment
for res in all_repayment:
        bomber_id, pay_amount, cycle = res
if bomber_id in summary_dailys:
summary_dailys[bomber_id]['repayment'] += pay_amount
else:
summary_dailys[bomber_id] = {'ptp_cnt': 0,
'call_cnt': 0,
'cycle': cycle,
'repayment': pay_amount,
'bomber_id': bomber_id,
'summary_date': str(summary_date)
}
insert_values = list(summary_dailys.values())
if insert_values:
SummaryDaily.insert_many(insert_values).execute()
# Get all unfinished cases in this cycle
def get_cycle_all_no_paid_app(cycle, type=None):
apps = (Application
.select(Application.id,
Application.latest_bomber_id,
Application.ptp_bomber,
Application.promised_date,
Application.cycle)
.where(Application.cycle == cycle,
Application.status != ApplicationStatus.REPAID.value,
Application.type == type)
.dicts())
dis_app_ids = [a['id'] for a in apps]
    # Update these cases' status in dispatch_app
    with db.atomic():
        for idx in range(0, len(dis_app_ids), 1000):
            ids = dis_app_ids[idx:idx + 1000]
            q = (DispatchApp.update(status=DisAppStatus.ABNORMAL.value)
                 .where(DispatchApp.application << ids)
                 .execute())
return apps
# Organize the apps by bomber_id
def get_app_logs(apps):
app_logs = {}
all_np_apps = []
all_p_apps = []
for a in apps:
latest_bomber = a["latest_bomber"]
        # When a case has no latest_bomber, fall back to its cycle as the key
        latest_bomber = a["cycle"] if not latest_bomber else latest_bomber
if latest_bomber in app_logs:
app_logs[latest_bomber]["to_ids"].append(a["id"])
else:
app_logs[latest_bomber] = {"bomber_id": latest_bomber,
"to_ids": [a["id"]],
"np_ids": [],
"p_ids": []}
if (a["promised_date"] and
a["promised_date"].date() >= datetime.now().date()):
app_logs[latest_bomber]["p_ids"].append(a["id"])
all_p_apps.append(a)
else:
app_logs[latest_bomber]["np_ids"].append(a["id"])
all_np_apps.append(a)
return app_logs, all_np_apps, all_p_apps
# Month-end dispatch to outsourced (partner) staff
def month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):
    # Shuffle the cases
apps = list(apps)
np_apps = list(np_apps)
random.shuffle(np_apps)
apps_len = len(apps)
np_apps_len = len(np_apps)
end = 0
all_app_precentage = 0
    # Get all the partners for this cycle
partners = (Partner.select()
.where(Partner.cycle == cycle,
Partner.status == PartnerStatus.NORMAL.value))
for p in partners:
all_app_precentage += p.app_percentage
for partner in partners:
        # Get the partner's collectors
bombers = (Bomber.select()
.where(Bomber.partner == partner.id,
Bomber.is_del == 0,
Bomber.status != BomberStatus.OUTER_LEADER.value))
bids = {b.id:b for b in bombers}
if len(bids) == 0:
logging.info("cycle:%s,partner:%s,no bomber"%(cycle, partner.id))
continue
start = end
if np_apps_len >= int(apps_len * all_app_precentage):
end = start + int(apps_len * partner.app_percentage)
else:
end = (start +
int(np_apps_len * partner.app_percentage / all_app_precentage))
        # All applications this partner team should receive
partner_app = np_apps[start:end]
dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)
    # The remainder goes to in-house staff (sliced after the loop, since
    # start/end are cumulative indices into the original list)
    np_apps = np_apps[end:]
return np_apps
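# Worked sketch (assumed numbers, not from the source): with apps_len=1000,
# np_apps_len=400 and two partners at app_percentage 0.2 and 0.1
# (all_app_precentage=0.3), 400 >= int(1000*0.3) holds, so the slices are
# np_apps[0:200] and np_apps[200:300]; np_apps[300:] is what this function
# returns for the in-house dispatch that follows.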
# Dispatch to in-house staff
def month_dispatch_app_inner(cycle,np_apps,app_logs,p_apps):
sys_cycle = {1: 'AB_TEST_C1A',
2: 'AB_TEST_C1B',
3: 'AB_TEST_C2',
4: 'AB_TEST_C3'}
    # Fetch the in-house collectors
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = (Bomber.select().where(Bomber.id << sys_values,
Bomber.is_del == 0))
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
bombers = bombers.where(Bomber.instalment == 0)
bids = {b.id:b for b in bombers}
    # In C1A, applications without a PTP go into auto outbound calling
if cycle == Cycle.C1A.value:
np_ids = [a["id"] for a in np_apps]
        # Reset the applications without a PTP
np = (Application
.update(status = ApplicationStatus.PROCESSING.value,
ptp_bomber = None,
latest_bomber = None)
.where(Application.id << np_ids)
.execute())
bomber_app_logs = app_logs.get(cycle, {})
        # During month-end dispatch, applications entering auto-call also need entry and exit records
out_param = {
"application_ids": bomber_app_logs.get("to_ids", []),
"month_dispatch": 1,
"src_bomber_id": cycle,
}
new_out_record(**out_param)
in_param = {
"cycle": cycle,
"application_ids": np_ids,
"dest_bomber_id": cycle
}
new_in_record(**in_param)
bomber_app_logs["need_num"] = len(np_apps)
bomber_app_logs["form_ids"] = np_ids
bomber_app_logs["status"] = 1
else:
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)
dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)
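# Illustrative note (assumed ids): the AB-test member list lives in
# SystemConfig as a JSON array of bomber ids, e.g. key='AB_TEST_C1B' with
# value='[101, 102]' makes json.loads(sys_config.value) yield [101, 102]
# for the Bomber.id << sys_values filter above.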
# Dispatch applications to individual bombers
def dispatch_apps_to_bomber(cycle,apps,bids,app_logs,out_partner=True,
type=ApplicationType.CASH_LOAN.value):
apps = list(apps)
random.shuffle(apps)
    # Work out how many applications each bomber should receive
bids_list = list(bids.keys())
if len(bids_list) <= 0:
logging.info("get_dispatch_app_to_bomber no bids")
return
average_num = get_average_number(len(apps), len(bids_list))
bomber_end = 0
with db.atomic():
for index, bid in enumerate(bids_list):
current_bomber = bids.get(bid)
bomber_app_logs = app_logs.get(bid, {})
bomber_start = bomber_end
bomber_end = bomber_start + average_num[index]
bomber_apps = apps[bomber_start:bomber_end]
from_p, from_np, from_ids,status = [], [], [], 0
            # Split this bomber's share into PTP and non-PTP applications
for ba in bomber_apps:
promised_date = ba.get("promised_date")
from_ids.append(ba["id"])
if promised_date and promised_date.date() >= date.today():
from_p.append(ba["id"])
else:
from_np.append(ba["id"])
app_status = ApplicationStatus.AB_TEST.value
            # In-house C1A cash-loan applications with a PTP need a special status
if (cycle == Cycle.C1A.value and not out_partner
and type == ApplicationType.CASH_LOAN.value):
app_status = ApplicationStatus.PROCESSING.value
if from_p:
p = (Application
.update(ptp_bomber=bid,
latest_bomber=bid,
status=app_status)
.where(Application.id << from_p)
.execute())
p_ids = bomber_app_logs.get("p_ids", []) + from_p
bomber_app_logs["p_ids"] = p_ids
if from_np:
np = (Application
.update(latest_bomber=bid,
ptp_bomber=None,
status=ApplicationStatus.AB_TEST.value)
.where(Application.id << from_np)
.execute())
np_ids = bomber_app_logs.get("np_ids", []) + from_np
bomber_app_logs["np_ids"] = np_ids
in_param = {"cycle": cycle,
"dest_partner_id": current_bomber.partner_id,
"application_ids": from_ids,
"dest_bomber_id": bid,
}
if type == ApplicationType.CASH_LOAN.value:
out_param = {"src_bomber_id": bid,
"application_ids": bomber_app_logs.get("to_ids",[]),
"month_dispatch":1
}
                # Exit record
new_out_record(**out_param)
                # Entry record
new_in_record(**in_param)
else:
out_and_in_record_instalment(**in_param)
bomber_app_logs["status"] = 1
need_num = bomber_app_logs.get("need_num", 0) + average_num[index]
bomber_app_logs["need_num"] = need_num
all_form_ids = bomber_app_logs.get("form_ids", []) + from_ids
bomber_app_logs["form_ids"] = all_form_ids
            # For in-house dispatch, skip the partner bookkeeping below
if not out_partner:
continue
            # Partner-dispatched applications are tracked in dispatch_app: delete the old records, then insert the new ones
try:
(DispatchApp.delete()
.where(DispatchApp.application.in_(from_ids))
.execute())
dispatch_ins = [{"application": id,
"partner": current_bomber.partner_id,
"bomber": bid,
"status": DisAppStatus.NORMAL.value,
} for id in from_ids]
(DispatchApp.insert_many(dispatch_ins).execute())
except Exception as e:
logging.info(
"month_disapp_error error:%s,bid:%s,from_ids:%s" %
(str(e), bid, from_ids))
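# A minimal sketch (not the project's implementation) of the even-split
# helper assumed by dispatch_apps_to_bomber; the real get_average_number
# is defined elsewhere in this module and may differ.
def _get_average_number_sketch(total, people):
    # Split `total` items across `people` as evenly as possible,
    # e.g. _get_average_number_sketch(10, 3) -> [4, 3, 3].
    base, remainder = divmod(total, people)
    return [base + 1 if i < remainder else base for i in range(people)]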
# Compute each application's overdue days and update its cycle accordingly
def calc_instalment_apps_cycle():
cycle_list = [Cycle.C2.value, Cycle.C3.value]
for cycle in cycle_list:
apps = (ApplicationR.select(ApplicationR.id,
ApplicationR.cycle,
ApplicationR.overdue_days.alias("ods"),
ApplicationR.latest_bomber,
OverdueBillR.status,
OverdueBillR.overdue_days.alias("oods"))
.join(OverdueBillR, JOIN_LEFT_OUTER,
on=ApplicationR.id == OverdueBillR.collection_id)
.where(ApplicationR.cycle == cycle,
ApplicationR.type ==
ApplicationType.CASH_LOAN_STAGING.value,
ApplicationR.status != ApplicationStatus.REPAID.value)
.dicts())
        # Work out each collection's effective overdue_days
lower_apps = {}
for app in apps:
if app["status"] == ApplicationStatus.REPAID.value:
continue
aid = app["id"]
if aid in lower_apps:
lower_apps[aid]["ods"] = max(app["oods"], app["ods"])
else:
lower_apps[aid] = {
"id": aid,
"cycle": cycle,
"ods": app["oods"],
}
        # Check whether each app's overdue days still matches its current cycle
for aid,app in lower_apps.items():
new_cycle = get_cycle_by_overdue_days(app["ods"])
if new_cycle != cycle:
update_param = {"cycle":new_cycle,
"overdue_days":app["ods"]}
entry_time = calc_entry_time(app["ods"])
update_param.update(entry_time)
                # Update the collection application
(Application.update(**update_param)
.where(Application.id == aid)
.execute())
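# Illustrative note (assumed values, not from the source): a C2 collection
# whose effective overdue days come out at 25 gets new_cycle C1B from
# get_cycle_by_overdue_days(25), so the update above refreshes its cycle,
# overdue_days and entry timestamps in one pass.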
# After a cycle downgrade, refresh the entry timestamps below from the overdue days
def calc_entry_time(overdue_days):
app_entry_time = {}
overdue_entry = {
"dpd1_entry": [1, 3],
"C1A_entry": [4, 10],
"C1B_entry": [11, 30],
"C2_entry": [31, 60],
"C3_entry": [61, 90]
}
for key,value in overdue_entry.items():
if value[0] <= overdue_days <= value[1]:
app_entry_time[key] = datetime.now()
else:
app_entry_time[key] = None
return app_entry_time
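# Worked example (assumed input): calc_entry_time(25) returns C1B_entry set
# to now() (since 11 <= 25 <= 30) and dpd1_entry/C1A_entry/C2_entry/C3_entry
# as None, so a downgrade wipes the stale entry timestamps in one update.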
# Month-end dispatch for instalment applications
def instalment_month_dispatch_app():
sys_cycle = {1: 'AB_TEST_C1A',
2: 'AB_TEST_C1B',
3: 'AB_TEST_C2',
4: 'AB_TEST_C3'}
    # Downgrade cycles first
calc_instalment_apps_cycle()
instalment_cycle_list = Cycle.values()[:4]
for cycle in instalment_cycle_list:
apps = get_cycle_all_no_paid_app(cycle,
ApplicationType.CASH_LOAN_STAGING.value)
if not apps:
logging.info("instalment_month_dispatch no get apps,cycle:%s"%cycle)
continue
app_logs, all_np_apps, all_p_apps = get_app_logs(apps)
        # Fetch the collectors to dispatch to
if cycle == Cycle.C1A.value:
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = (Bomber.select().where(Bomber.id << sys_values,
Bomber.is_del == 0))
else:
bombers = (Bomber.select().where(Bomber.is_del == 0,
Bomber.instalment == cycle))
bids = {b.id:b for b in bombers}
if not bids:
logging.info("instalment_month_dispatch no bomber,cycle:%s"%cycle)
continue
dispatch_apps_to_bomber(cycle = cycle,
apps = all_p_apps,
bids = bids,
app_logs = app_logs,
out_partner = False,
type = ApplicationType.CASH_LOAN_STAGING.value)
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
dispatch_apps_to_bomber(cycle=cycle,
apps=all_np_apps,
bids=bids,
app_logs=app_logs,
out_partner=False,
type=ApplicationType.CASH_LOAN_STAGING.value)
else:
            # Applications without a PTP still need entry records
np_ids = [a["id"] for a in all_np_apps]
np = (Application.update(status=ApplicationStatus.UNCLAIMED.value,
ptp_bomber=None,
latest_bomber=None)
.where(Application.id << np_ids,
                         Application.status != ApplicationStatus.REPAID.value)
.execute())
in_param = {
"cycle": cycle,
"application_ids": np_ids,
"dest_bomber_id": cycle
}
out_and_in_record_instalment(**in_param)
        # Downgraded applications are also recorded in the dispatch history
try:
dispatch_apps_logs = []
for bid,app in app_logs.items():
alg = {
"bomber_id": bid,
"need_num": -len(app.get("to_ids", [])),
"form_ids": json.dumps(app.get("form_ids", [])),
"to_ids": json.dumps(app.get("to_ids", [])),
"np_ids": json.dumps(app.get("np_ids", [])),
"p_ids": json.dumps(app.get("p_ids", [])),
"status": 1
}
if bid in bids:
alg["need_num"] = app.get("need_num", 0)
dispatch_apps_logs.append(alg)
if dispatch_apps_logs:
DispatchAppLogs.insert_many(dispatch_apps_logs).execute()
except Exception as e:
logging.info(
"instalment_dispatch_app_month log error.cycle:%s,error:%s" % (
cycle, str(e)))
# Redistribute all applications each month (the job runs on the 1st)
@action(MessageAction.MONTH_DISPATCH_APP)
def month_dispatch_app(payload, msg_id):
    # Only run on the 1st of the month
if datetime.today().day != 1:
logging.info("今天不是1号,不能执行分期件")
return
cycle_list = [Cycle.C1A.value,
Cycle.C1B.value,
Cycle.C2.value,
Cycle.C3.value]
with db.atomic():
for cycle in cycle_list:
apps = get_cycle_all_no_paid_app(cycle,
ApplicationType.CASH_LOAN.value)
if not apps:
logging.info("month_dispatch_app not get apps.cycle:%s"%cycle)
continue
app_logs, all_np_apps, all_p_apps = get_app_logs(apps)
np_apps = month_dispatch_app_out_partner(cycle=cycle,
apps=apps,
app_logs=app_logs,
np_apps = all_np_apps)
if not np_apps and not all_p_apps:
logging.info("month_dispatch_app not get inner apps.cycle:%s",
cycle)
continue
month_dispatch_app_inner(cycle,np_apps,app_logs,all_p_apps)
            # Record the dispatch logs in the table
try:
dispatch_apps_logs = []
for bid,app in app_logs.items():
alg = {
"bomber_id": bid,
"need_num": app.get("need_num",0),
"form_ids": json.dumps(app.get("form_ids", [])),
"to_ids": json.dumps(app.get("to_ids", [])),
"np_ids": json.dumps(app.get("np_ids", [])),
"p_ids": json.dumps(app.get("p_ids", [])),
"status": 1
}
dispatch_apps_logs.append(alg)
for idx in range(0, len(dispatch_apps_logs), 10):
DispatchAppLogs.insert_many(
dispatch_apps_logs[idx:idx + 10]).execute()
except Exception as e:
logging.error(
"insert dispatch_log error:%s,cycle:%s"%(str(e),cycle))
try:
instalment_month_dispatch_app()
except Exception as e:
logging.info("instalment_month_dispatch_error:%s"%str(e))
# Daily scheduled summary of collection applications
@action(MessageAction.SUMMARY_BOMBER_OVERDUE)
def summary_bomber_overdue_everyday(payload, msg_id):
cycle_list = Cycle.values()
which_day = date.today()
    # Fetch the unfinished orders for each cycle
for cycle in cycle_list:
apps = (ApplicationR.select(ApplicationR.id,
ApplicationR.cycle,
ApplicationR.ptp_bomber,
ApplicationR.overdue_days,
ApplicationR.promised_date,
ApplicationR.follow_up_date,
ApplicationR.external_id,
OverdueBillR.status,
OverdueBillR.periods,
OverdueBillR.sub_bill_id)
.join(OverdueBillR, JOIN_LEFT_OUTER,
on = ApplicationR.id == OverdueBillR.collection_id)
.where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.no_active == 0,
ApplicationR.cycle == cycle)
.dicts())
bomber_overdue_list = []
for app in apps:
status = app.get("status")
if status == ApplicationStatus.REPAID.value:
continue
ptp_bomber = app.get("ptp_bomber")
promised_date = app.get("promised_date")
follow_up_date = app.get("follow_up_date")
if not promised_date or promised_date.date() < date.today():
ptp_bomber = promised_date = None
if not follow_up_date or follow_up_date.date() < date.today():
follow_up_date = None
overdue_dict = {
"collection_id": app.get("id"),
"external_id": app.get("external_id"),
"sub_bill_id": app.get("sub_bill_id"),
"periods": app.get("periods"),
"cycle": app.get("cycle") if app.get("cycle") else cycle,
"ptp_bomber": ptp_bomber,
"promised_date": promised_date,
"follow_up_date": follow_up_date,
"which_day": which_day,
"overdue_days": app.get("overdue_days")
}
bomber_overdue_list.append(overdue_dict)
try:
if bomber_overdue_list:
with db.atomic():
for index in range(0, len(bomber_overdue_list), 1000):
insert_list = bomber_overdue_list[index: index+1000]
BomberOverdue.insert_many(insert_list).execute()
except Exception as e:
logging.info(
"summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s"%(
cycle,str(which_day),str(e)))
# Every minute, tally each bomber's count of PTP applications
@action(MessageAction.BOMBER_PTP_REAL_TIME_SUMMARY)
def bomber_ptp_real_time_summary(payload, msg_id):
ptp_switch_number = 200
sys_ptp_switch = (SystemConfig.select()
.where(SystemConfig.key == 'PTP_SWITCH_NUMBER')
.first())
if sys_ptp_switch and sys_ptp_switch.value.isdigit():
ptp_switch_number = int(sys_ptp_switch.value)
today = datetime.today().date()
ptp_apps = (ApplicationR.select(fn.COUNT(ApplicationR.id).alias('ptp_cnt'),
ApplicationR.latest_bomber)
.where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.cycle < Cycle.C2.value,
ApplicationR.promised_date >= today,
ApplicationR.latest_bomber.is_null(False))
.group_by(ApplicationR.latest_bomber))
bomber_ptps = (BomberPtp.select(BomberPtp.bomber_id))
bomber_ptp_bids = [b.bomber_id for b in bomber_ptps]
insert_result = []
for app in ptp_apps:
ptp_switch = BomberCallSwitch.ON.value
if app.ptp_cnt >= ptp_switch_number:
ptp_switch = BomberCallSwitch.OFF.value
params = {"bomber_id": app.latest_bomber_id,
"ptp_cnt": app.ptp_cnt,
"ptp_switch": ptp_switch,
"auto_ext": app.latest_bomber.auto_ext}
if app.latest_bomber_id in bomber_ptp_bids:
try:
q = (BomberPtp.update(**params)
.where(BomberPtp.bomber_id==app.latest_bomber_id)
.execute())
except Exception as e:
logging.error("ptp_reil_time_summary_error:%s,data,bid:%s" % (
str(e),params,app.latest_bomber_id))
else:
insert_result.append(params)
if insert_result:
BomberPtp.insert_many(insert_result).execute()
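# Illustrative note (assumed count): with the default PTP_SWITCH_NUMBER of
# 200, a bomber holding 210 live PTP applications is written with
# ptp_switch=OFF so the dialer stops feeding them; once the count drops
# below 200, a later run flips the switch back ON.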
# At 10:00, 14:00 and 16:30 each day, turn off auto-call delivery; a bomber can take auto calls again only after finishing follow-ups on the auto-call applications already assigned
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)
def today_ptp_auto_call_switch(payload, msg_id):
today = datetime.today().date()
next_day = today + timedelta(days=1)
    # Find collectors holding applications whose PTP falls due today
apps = (ApplicationR.select(ApplicationR.latest_bomber)
.where(ApplicationR.promised_date < next_day,
ApplicationR.promised_date >= today,
ApplicationR.promised_date.is_null(False),
ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.cycle < Cycle.C2.value,
ApplicationR.latest_bomber.is_null(False))
.group_by(ApplicationR.latest_bomber))
bids = [a.latest_bomber_id for a in apps]
if not bids:
return
q = (BomberPtp.update(today_switch=BomberCallSwitch.OFF.value)
.where(BomberPtp.auto_ext.is_null(False),
BomberPtp.bomber_id << bids)
.execute())
# At 8:00 every morning, reset the collectors' auto-call switch to ON
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)
def update_today_switch_every_day(payload, msg_id):
q = (BomberPtp.update(today_switch=BomberCallSwitch.ON.value)
.where(BomberPtp.auto_ext.is_null(False))
.execute())
# Notify bomber when a user updates their phone number
@action(MessageAction.USER_UPDATE_PHONE)
def user_change_phone(payload, msg_id):
user_id = payload.get("user_id")
new_mobile_no = payload.get("new_mobile_no")
if not all([user_id, new_mobile_no]):
logging.info("用户修改电话,没有获取到用户id获这用户手机号")
return
source = 'applicant updated number'
contacts = (Contact.select()
.where(Contact.user_id == int(user_id)))
if not contacts.exists():
logging.info("用户在contact中没有记录")
return
new_contact = contacts.where(Contact.number == new_mobile_no,
Contact.source == source)
if new_contact.exists():
logging.info("用户手机号已存在")
return
contact = contacts.order_by(-Contact.created_at).first()
Contact.create(user_id=contact.user_id,
name=contact.name,
number = new_mobile_no,
source = source,
relationship = Relationship.APPLICANT.value,
real_relationship = Relationship.APPLICANT.value)
|
flexible
|
{
"blob_id": "1fbe9078748b00efad0211b29ad572df97cda921",
"index": 1958,
"step-1": "<mask token>\n\n\ndef dpd1_process(lst):\n \"\"\"已废弃的方法\"\"\"\n if not lst:\n return\n for key, l in lst.items():\n rule = getattr(BeforeInBomber, key).value\n query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)\n ).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(\n rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.\n call_success()))\n success_set = {i.loanid for i in query}\n failed_list = list(set(l) - success_set)\n post_params = {'$and': rule.get('$and'), 'app_list': failed_list}\n resp = Hyperloop().post('/bomber/score/verify', json=post_params)\n if not resp.ok:\n logging.error('hyperloop score verification failed: %s, %s',\n str(resp.status_code), str(resp.text))\n logging.error('hyperloop score verification failed: %s', str(\n post_params))\n continue\n logging.debug('hyperloop score verification success: %s', resp.content)\n resp_json = resp.json()\n app_list = resp_json['data']\n if not app_list:\n continue\n for item in app_list:\n if random.randint(0, 5) == 1:\n send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':\n int(item)})\n\n\n@action(MessageAction.GET_IVR)\ndef get_ivr(payload, msg_id):\n logging.warning('start get_ivr')\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'DPD1-3_INTO_IVR').first()\n now = date.today()\n if sys_config and sys_config.value:\n start = now - timedelta(days=3)\n else:\n start = now\n end = now + timedelta(days=4)\n item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()\n if not item:\n AutoIVR.delete().execute()\n current_page = 0\n elif item.current_page >= item.total_page:\n return\n else:\n current_page = item.current_page\n auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,\n 'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,\n 'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':\n 10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,\n 'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,\n 'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,\n 'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,\n 'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,\n 'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,\n 'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,\n 'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,\n 'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,\n 'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,\n 'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':\n 101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,\n 'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,\n 'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,\n 'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}\n current_page += 1\n with db.atomic() as transaction:\n while True:\n bill_service = BillService()\n ivr_action = bill_service.ivr_pages(page=current_page,\n page_size=500, start_time=utc_datetime(str(start)),\n end_time=utc_datetime(str(end)))\n result = ivr_action['result']\n page_size = int(ivr_action.get('page_size', 0))\n total_page = int(ivr_action.get('total_page', 0))\n insert_args = []\n for a in result:\n due_at = no_utc_datetime(a['due_at'])\n days = (due_at.date() - now).days\n if days == 2:\n continue\n if days > 0:\n time = str(days)\n else:\n time = str(days).replace('-', 'PDP')\n key = a['app_name'] + str(a['su']) + time\n group = auto_ivr.get(key)\n user_id = a['user_id']\n try:\n 
user_resp = AccountService().get_user(path_params={\n 'user_id': user_id})\n if str(user_resp['mobile_no']) == str(a['user_mobile_no']):\n numbers = a['user_mobile_no']\n else:\n numbers = a['user_mobile_no'] + ',' + user_resp.get(\n 'mobile_no')\n except:\n logging.error('Request Account Service Error.')\n numbers = a['user_mobile_no']\n insert_args.append({'application_id': a['id'], 'numbers':\n numbers, 'group': group, 'user_id': user_id})\n AutoIVR.insert_many(insert_args).execute()\n if current_page == 1:\n IVRActionLog.create(total_page=total_page, proc_date=now,\n page_size=page_size, current_page=current_page)\n item = IVRActionLog.get(IVRActionLog.proc_date == now)\n else:\n item.current_page = current_page\n item.page_size = page_size\n item.total_page = total_page\n item.save()\n transaction.commit()\n current_page += 1\n if current_page > int(total_page):\n break\n if sys_config and sys_config.value:\n try:\n classfiy_dpd_ptp_apps()\n except Exception as e:\n logging.error('dpd1-3_test_error:%s' % str(e))\n\n\ndef ivr_t2_test():\n t2_groups = [39, 40, 41, 42, 43, 44]\n ivr_test_proportion = 0.2\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'IVR_TEST_PROPORTION').first()\n if sys_config and sys_config.value:\n ivr_test_proportion = float(sys_config.value)\n t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.\n status == AutoIVRStatus.AVAILABLE.value)\n t2_dict = defaultdict(list)\n for ivr in t2_ivrs:\n t2_dict[ivr.group].append(ivr.id)\n test_ivr_ids = []\n for group, ivr_ids in t2_dict.items():\n number = ceil(len(ivr_ids) * ivr_test_proportion)\n test_ivr_ids += ivr_ids[:number]\n if not test_ivr_ids:\n return\n q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.\n group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.APP_MERGE)\n@deprecated(version='1.0', reason='This function will be removed soon')\ndef app_merge(payload, msg_id):\n sql = \"\"\"\n select *\n from (\n select a.id as id\n from dashboard.application as a\n inner join repayment.bill2 as b on b.external_id = a.id\n where not exists (\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.app = 'DanaCepat'\n and a.is_first_loan = 1\n and a.apply_at < '2018-08-23 20:50:00'\n and b.overdue_days between 1 and 3\n and b.status != 2) result\n where not exists (\n select 1\n from bomber.application as a\n where a.cycle = 1\n and a.status = 4\n and a.id = result.id\n )\n \"\"\"\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n new_data = cursor.fetchall()\n cursor.close()\n if new_data:\n bomber = [103, 104]\n for d in new_data:\n app_id = {'id': d[0]}\n application_overdue(app_id, None)\n Application.update(status=ApplicationStatus.AB_TEST.value,\n latest_bomber=random.choice(bomber), ptp_bomber=None).where(\n Application.id == d[0]).execute()\n logging.warning('add new app success')\n ptp = date.today() - timedelta(days=1)\n del_sql = (\n \"\"\"\n select a.id\n from bomber.application as a\n where exists(\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.cycle = 1\n and a.status = 4\n and (a.promised_date is null or a.promised_date < \"%s\")\n \"\"\"\n % ptp)\n cursor = readonly_db.get_cursor()\n cursor.execute(del_sql)\n del_date = cursor.fetchall()\n cursor.close()\n if del_date:\n return\n ids = list()\n for d in del_date:\n ids.append(d[0])\n 
Application.update(status=ApplicationStatus.UNCLAIMED.value,\n latest_bomber=None).where(Application.id << ids).execute()\n\n\n@action(MessageAction.APPLICATION_BOMBER)\ndef application_overdue(payload, msg_id):\n application_id = payload['id']\n sub_bill_id = payload['bill_sub_id']\n local_app = Application.select().where(Application.external_id ==\n application_id).order_by(Application.finished_at).first()\n if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:\n logging.info('application %s overdue, already exists', application_id)\n add_contact(local_app)\n return\n if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:\n overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==\n sub_bill_id, OverdueBillR.external_id == application_id)\n if overdue_bill.exists():\n logging.info(\n 'application %s,sub_bill_id %s overdue, already exists' % (\n application_id, sub_bill_id))\n return\n try:\n sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])\n sub_bill = sub_bill[0]\n except Exception:\n logging.error(\n 'application %s overdue, get sub_bill info failed:Request To repayment Error'\n , application_id)\n return\n if sub_bill['status'] == 2:\n logging.error('application %s overdue, but bills already cleared',\n application_id)\n return\n overdue_days = sub_bill.get('overdue_days', 0)\n if overdue_days == 0:\n logging.info('application {} no overdue'.format(str(application_id)))\n return\n gold_eye = GoldenEye().get('/applications/%s' % application_id)\n if not gold_eye.ok:\n logging.error('get application %s failed: Request to GoldenEye.',\n application_id)\n return\n gold_app = gold_eye.json().get('data')\n user_id = gold_app['user_id']\n apply_history = Dashboard().get('/users/%s/apply-history' % user_id)\n if not apply_history.ok:\n logging.error(\n 'get user %s apply history failed: Request to Dashboard Failed.',\n user_id)\n return\n history = apply_history.json().get('data')\n loan_success_times = len([(1) for i in history if i['status'] in [80, \n 90, 100, 70] and i['id'] != gold_app['id']])\n id = application_id\n type = ApplicationType.CASH_LOAN.value\n bill_id = sub_bill.get('bill_id')\n amount = sub_bill.get('amount')\n amount_net = sub_bill.get('amount_net')\n interest_rate = sub_bill.get('interest_rate')\n overdue_days = sub_bill.get('overdue_days')\n origin_due_at = sub_bill.get('origin_due_at')\n sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,\n 'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),\n 'overdue_days': overdue_days, 'origin_due_at': origin_due_at,\n 'amount': amount, 'amount_net': amount_net, 'interest_rate':\n interest_rate, 'external_id': application_id}\n if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:\n if local_app and local_app.status != ApplicationStatus.REPAID.value:\n sub_overdue_bill['collection_id'] = local_app.id\n local_app.amount += amount\n local_app.amount_net += amount_net\n local_app.save()\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('application %s,sub_bill_id:%s overdue created' %\n (application_id, sub_bill_id))\n return\n else:\n id = idg()\n type = ApplicationType.CASH_LOAN_STAGING.value\n sub_overdue_bill['collection_id'] = id\n ptp_info = BombingHistory.filter(BombingHistory.application == id).first()\n promised_amount = ptp_info and ptp_info.promised_amount\n promised_date = ptp_info and ptp_info.promised_date\n application = Application.create(id=id, user_id=gold_app['user_id'],\n 
user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[\n 'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],\n contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(\n 'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=\n birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(\n gold_app.get('id_ektp')), profile_province=(gold_app.get(\n 'profile_province') or {}).get('name'), profile_city=(gold_app.get(\n 'profile_city') or {}).get('name'), profile_district=(gold_app.get(\n 'profile_district') or {}).get('name'), profile_residence_time=\n gold_app.get('profile_residence_time'), profile_residence_type=\n gold_app.get('profile_residence_type'), profile_address=gold_app.\n get('profile_address'), profile_education=gold_app.get(\n 'profile_education'), profile_college=(gold_app.get(\n 'profile_college') or {}).get('name'), job_name=gold_app.get(\n 'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get\n ('job_bpjs'), job_user_email=gold_app.get('job_user_email'),\n job_type=gold_app.get('job_type'), job_industry=gold_app.get(\n 'job_industry'), job_department=gold_app.get('job_department'),\n job_province=(gold_app.get('job_province') or {}).get('name'),\n job_city=(gold_app.get('job_city') or {}).get('name'), job_district\n =(gold_app.get('job_district') or {}).get('name'), job_address=\n gold_app.get('job_address'), amount=amount, amount_net=amount_net,\n interest_rate=interest_rate, term=gold_app.get('term'),\n origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=\n sub_bill.get('repay_at'), loan_success_times=loan_success_times,\n arrived_at=datetime.now(), follow_up_date=datetime.now(),\n promised_amount=promised_amount, promised_date=promised_date,\n external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=\n datetime.now())\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('overdue application %s created', application_id)\n Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,\n status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)\n add_contact(application)\n\n\n<mask token>\n\n\ndef get_contact_from_mongo(number):\n if not number:\n return []\n query = TotalContact.objects(src_number=number, source__in=TotalContact\n .available()).order_by('source')\n lst = []\n for c in query:\n relation = TotalContact.relationship(c.source)\n if relation == -1:\n continue\n source = TotalContact.str_source(c.source)\n if not source:\n continue\n lst.append({'related_number': c.dest_number, 'source': source,\n 'is_calc': c.is_calc, 'total_count': c.total_count,\n 'total_duration': c.total_duration, 'relation': relation,\n 'name': c.dest_name})\n return lst\n\n\n<mask token>\n\n\ndef check_key_not_none(payload, keys):\n for key in keys:\n if payload.get(key) is None:\n logging.error('Missing args {}'.format(str(key)))\n return False\n return True\n\n\n<mask token>\n\n\n@action(MessageAction.BILL_RELIEF)\ndef bill_relief(payload, msg_id):\n \"\"\"已废弃\"\"\"\n bill = payload['head_bill']\n repay_at = str_no_utc_datetime(bill['latest_repay_at'])\n updated_row = Application.update(repay_at=repay_at).where(Application.\n id == bill['external_id']).execute()\n logging.info('application %s bill relief done', bill['external_id'])\n return updated_row\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)\ndef calc_overdue_days_over(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n now = 
fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query = Application.update(overdue_days=overdue_days).where(Application\n .status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.\n overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)\n updated_rows_count = query.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count)\n try:\n calc_overdue_days_over_instalment()\n except Exception as e:\n logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))\n apps = Application.filter(Application.status << [ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value], Application.overdue_days > 95, \n Application.promised_date.is_null(True) | (fn.DATE(Application.\n promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {\n 'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n\n\n<mask token>\n\n\ndef calc_overdue_days_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n today_now_time = datetime.now()\n month_first_day = today_now_time.replace(day=1, hour=1, minute=30,\n second=0, microsecond=0)\n for status in sub_bill_status_list:\n query = OverdueBill.update(overdue_days=overdue_days).where(\n OverdueBill.status == status, OverdueBill.overdue_days <= 95)\n updated_rows_count = query.execute()\n logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %\n (updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.\n created_at, OverdueBill.collection_id, OverdueBill.overdue_days\n ).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.\n collection_id == Application.id).where(Application.status ==\n status, Application.type == ApplicationType.CASH_LOAN_STAGING.value\n )\n app_update = {}\n for ob in overdue_bills:\n if (ob.status == ApplicationStatus.REPAID.value and ob.\n created_at < month_first_day):\n continue\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)\ndef automatic_escalation(payload, msg_id):\n app_ids = payload.get('application_list', [])\n if not app_ids:\n return\n apps = Application.select().where(Application.id.in_(app_ids), \n Application.status != ApplicationStatus.REPAID.value)\n for a in apps:\n new_cycle = application_entry_different_calculations(a)\n if a.overdue_days < 90:\n logging.info(\n 'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'\n .format(a.id, new_cycle, a.cycle, a.overdue_days))\n if new_cycle > a.cycle:\n with db.atomic():\n if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle\n .C1B.value):\n bomber_id = (a.latest_bomber_id 
if a.latest_bomber_id else\n a.cycle)\n DispatchAppHistory.update(out_at=datetime.now(),\n out_overdue_days=a.overdue_days).where(\n DispatchAppHistory.application == a.id, \n DispatchAppHistory.bomber_id == bomber_id).execute()\n Escalation.create(application=a.id, type=EscalationType.\n AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,\n current_cycle=a.cycle, escalate_to=new_cycle,\n current_bomber_id=a.latest_bomber)\n dis_app_update = DispatchApp.update(status=DisAppStatus.\n ABNORMAL.value).where(DispatchApp.application == a.id)\n dis_app_update.execute()\n a.cycle = new_cycle\n a.last_bomber = a.latest_bomber\n a.status = ApplicationStatus.UNCLAIMED.value\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n if new_cycle == Cycle.C1B.value:\n a.C1B_entry = datetime.now()\n elif new_cycle == Cycle.C2.value:\n a.C2_entry = datetime.now()\n elif new_cycle == Cycle.C3.value:\n a.C3_entry = datetime.now()\n a.save()\n logging.info('automatic escalation done')\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY)\ndef cron_summary(payload, msg_id):\n \"\"\"已废弃\"\"\"\n employees = Bomber.select(Bomber, Role).join(Role)\n summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,\n 'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,\n 'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':\n 0, 'sms_sent': 0} for i in employees}\n now_date = date.today()\n cal_date = now_date - timedelta(days=1)\n claimed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('claimed')).where(fn.DATE(Application.\n claimed_at) == cal_date, Application.status << [ApplicationStatus.\n PROCESSING.value, ApplicationStatus.REPAID.value], Application.\n latest_bomber.is_null(False)).group_by(Application.latest_bomber)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False)).group_by(\n Application.latest_bomber)\n completed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('completed')).where(Application.\n latest_bombing_time.is_null(False), fn.DATE(Application.\n latest_bombing_time) == cal_date, Application.latest_bomber.is_null\n (False)).group_by(Application.latest_bomber)\n escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(\n Escalation.id).alias('escalated')).where(fn.DATE(Escalation.\n created_at) == cal_date, Escalation.type == EscalationType.\n AUTOMATIC.value, Escalation.current_bomber.is_null(False), \n Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation\n .current_bomber)\n transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).\n alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==\n cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(\n Transfer.operator)\n promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(\n BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.\n created_at) == cal_date, BombingHistory.result == BombingResult.\n HAS_PROGRESS.value).group_by(BombingHistory.bomber)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed 
== True,\n RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.\n current_bomber)\n calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id\n ).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==\n cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)\n calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.\n record_id).alias('calls_connected')).where(fn.DATE(CallLog.\n time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==\n '1').group_by(CallLog.user_id)\n sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(\n ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_\n (ConnectType.sms()), ConnectHistory.created_at >= cal_date, \n ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)\n for i in claimed:\n summary[i.latest_bomber_id]['claimed'] += i.claimed\n for i in completed:\n summary[i.latest_bomber_id]['completed'] += i.completed\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n for i in escalated:\n summary[i.current_bomber_id]['escalated'] += i.escalated\n for i in transferred:\n summary[i.operator_id]['transferred'] += i.transferred\n for i in promised:\n summary[i.bomber_id]['promised'] += i.promised\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in calls_made:\n summary[int(i.user_id)]['calls_made'] += i.calls_made\n for i in calls_connected:\n summary[int(i.user_id)]['calls_connected'] += i.calls_connected\n for i in sms_sent:\n summary[i.operator_id]['sms_sent'] += i.sms_sent\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'claimed': data['claimed'], 'completed': data['completed'],\n 'cleared': data['cleared'], 'escalated': data['escalated'],\n 'transferred': data['transferred'], 'promised': data['promised'\n ], 'amount_recovered': data['amount_recovered'], 'calls_made':\n data['calls_made'], 'calls_connected': data['calls_connected'],\n 'sms_sent': data['sms_sent'], 'date': cal_date})\n if insert_args:\n Summary.insert_many(insert_args).execute()\n cycle_args = []\n escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(\n Escalation.id).alias('escalated_in')).where(Escalation.status ==\n ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==\n cal_date).group_by(Escalation.escalate_to)\n for i in escalated_in:\n cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.\n escalated_in, 'date': cal_date})\n amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM\n (RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)\n for i in amount_recovered_total:\n amount_recovered_total = i.principal_part + i.late_fee_part\n cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':\n amount_recovered_total, 'date': cal_date})\n if cycle_args:\n Summary.insert_many(cycle_args).execute()\n logging.info('cal summary done')\n send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY2)\ndef cron_summary2(payload, msg_id):\n \"\"\"已废弃,定时任务还在执行,具体情况待确定\"\"\"\n cal_date = date.today() - timedelta(days=1)\n employees = Bomber.select(Bomber, Role).join(Role)\n auto_call_actions = 
AutoCallActions.select(AutoCallActions.bomber,\n AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')\n ).where(fn.DATE(AutoCallActions.created_at) == cal_date)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.\n is_null(False), RepaymentLog.is_bombed == True)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False))\n auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,\n AutoCallActions.result)\n amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)\n cleared = cleared.group_by(Application.latest_bomber)\n summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,\n 'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered': \n 0} for e in employees}\n for a in auto_call_actions:\n summary[a.bomber_id]['answered_calls'] += a.count\n if a.result == AutoCallResult.PTP.value:\n summary[a.bomber_id]['ptp'] += a.count\n if a.result == AutoCallResult.FOLLOW_UP.value:\n summary[a.bomber_id]['follow_up'] += a.count\n if a.result == AutoCallResult.NOT_USEFUL.value:\n summary[a.bomber_id]['not_useful'] += a.count\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'answered_calls': data['answered_calls'], 'ptp': data['ptp'],\n 'follow_up': data['follow_up'], 'not_useful': data['not_useful'\n ], 'cleared': data['cleared'], 'amount_recovered': str(data[\n 'amount_recovered']), 'date': cal_date})\n if insert_args:\n Summary2.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_DISCOUNT_APPROVED)\ndef bomber_discount_approved(payload, msg_id):\n app_id = payload['id']\n msg_type = payload['msg_type']\n discount_to = payload['discount_to']\n effective_to = payload['effective_to']\n application = Application.filter(Application.id == app_id).first()\n if not application:\n logging.error(\n 'discount approved msg send failed application %s not found',\n app_id)\n return\n template = Template.select(Template.text, Template.app).where(Template.\n type == ConnectType.AUTO_SMS.value, Template.id << Template.\n get_auto_sms_tpl(msg_type), Template.app == application.app).first()\n if not template:\n logging.error('discount approved msg send failed template %s not found'\n , msg_type)\n return\n promised_date = None\n if application.promised_date:\n promised_date = application.promised_date.strftime('%d-%m-%Y')\n tpl_data = {'user_name': application.user_name, 'due_days': application\n .overdue_days, 'app_name': application.app, 'phone': application.\n user_mobile_no, 'cs_number': cs_number_conf.get(application.app,\n '02150202889'), 'promised_date': promised_date, 'discount_to':\n discount_to, 'effective_to': effective_to}\n content = template.text.format(**tpl_data)\n data_list = [{'receiver': '62' + application.user_mobile_no, 'content':\n content, 'title': ''}]\n 
send_sms(data_list, msg_type, application.app)\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST)\ndef bomber_auto_call_list(payload, msg_id):\n with db.atomic():\n bomber_dispatch_app()\n dispatch_instalment_app()\n dis_apps = DispatchApp.select(DispatchApp.application).where(\n DispatchApp.status == DisAppStatus.NORMAL.value)\n c1_apps = Application.select(Application.id, Application.cycle,\n Application.follow_up_date, Application.called_times).where(Application\n .status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.\n AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.\n is_rejected == False, Application.promised_date.is_null(True) | (fn\n .DATE(Application.promised_date) < datetime.today().date())).order_by(\n Application.overdue_days, Application.apply_at)\n dis_apps_ids = [da.application_id for da in dis_apps]\n insert_args = []\n for a in c1_apps:\n if a.id in dis_apps_ids:\n continue\n insert_args.append({'application': a.id, 'cycle': a.cycle,\n 'follow_up_date': a.follow_up_date, 'called_times': 1 if a.\n called_times else 0, 'description': 'init'})\n if not insert_args:\n logging.error('no application need auto call')\n with db.atomic():\n AutoCallList.delete().execute()\n for idx in range(0, len(insert_args), 100):\n AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()\n for idx in range(0, len(insert_args), 100):\n application_list = [i['application'] for i in insert_args[idx:idx +\n 100]]\n send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {\n 'application_list': application_list})\n logging.info('bomber generate auto call list finished')\n send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})\n\n\nclass ChangeBomberTool(object):\n\n @staticmethod\n def in_record(bomber_id, ids, bd):\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.\n alias('application_id'), R(str(bomber_id)).alias('bomber_id'),\n fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(\n 'expected_out_time'), Application.overdue_days.alias(\n 'entry_overdue_days')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << ids)\n Application.update(latest_bomber=bomber_id).where(Application.id.\n in_(ids)).execute()\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bd),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n @staticmethod\n def out_record(a, bd):\n _id = str(a.id)\n DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a\n .overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]\n .get('principal_paid')), out_late_fee_pending=bd[_id].get(\n 'late_fee') - bd[_id].get('late_fee_paid')).where(\n DispatchAppHistory.application == a.id, DispatchAppHistory.\n bomber_id == a.latest_bomber_id).execute()\n a.last_bomber = a.latest_bomber\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n a.save()\n\n @staticmethod\n def classify(l, b):\n if len(l) == 1:\n return l[0]\n _l = filter(lambda x: x['bomber'] != b, l)\n return min(_l, key=lambda x: len(x['ids']))\n\n\n<mask token>\n\n\ndef bomber_dispatch_app():\n try:\n c1a_dispatch_app()\n except Exception as e:\n logging.error('c1a_dispatch_app error:%s' % str(e))\n cycle = {(1): 10, (2): 30, (3): 60, (4): 
90}\n apps = Application.select().where(fn.DATE(Application.C2_entry) == date\n .today(), Application.type == ApplicationType.CASH_LOAN.value)\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C2.value)\n apps_ids = [a.id for a in apps]\n dispatch_inserts = []\n start_index = 0\n apps_length = len(apps_ids)\n logging.warning('apps length %s' % str(apps_length))\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)\n gen = CycleIter([b.id for b in bombers])\n existing_list = []\n end_index = start_index + int(apps_length * p.app_percentage)\n logging.info('partner length %s' % str(end_index))\n if not apps_ids[start_index:end_index]:\n continue\n bills = BillService().bill_list(application_ids=apps_ids[\n start_index:end_index])\n bill_dict = {bill['application_id']: bill for bill in bills}\n for a_id in apps_ids[start_index:end_index]:\n bomber = average_gen(gen, existing_list)\n q = DispatchApp.delete().where(DispatchApp.application == a_id\n ).execute()\n dispatch_inserts.append({'application': a_id, 'bomber': bomber,\n 'partner': p.id})\n application = Application.select().where(Application.id == a_id\n ).first()\n application.latest_bomber = bomber\n application.status = ApplicationStatus.AB_TEST.value\n application.ptp_bomber = None\n application.save()\n day_next_cycle = cycle.get(application.cycle\n ) - application.overdue_days\n DispatchAppHistory.create(application=a_id, partner_id=p.id,\n bomber_id=bomber, entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - Decimal(\n bill_dict[a_id].get('principal_paid')),\n entry_late_fee_pending=Decimal(bill_dict[a_id].get(\n 'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')\n ), expected_out_time=date.today() + timedelta(days=\n day_next_cycle))\n start_index = end_index\n with db.atomic():\n for idx in range(0, len(dispatch_inserts), 100):\n DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n config = SystemConfig.prefetch(SCI.AB_TEST_C2)\n c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)\n c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)\n c2 = apps_ids[start_index:]\n if c2:\n bills = BillService().bill_list(application_ids=c2)\n else:\n bills = []\n bill_dict = {bill['application_id']: bill for bill in bills}\n logging.info('c2 AB_test length: %s' % str(c2))\n gen = CycleIter(c2_bomber)\n existing_list = []\n for c in c2:\n bomber = average_gen(gen, existing_list)\n application = Application.filter(Application.id == c).first()\n application.status = ApplicationStatus.AB_TEST.value\n application.latest_bomber = bomber\n application.ptp_bomber = None\n application.save()\n day_next_cycle = 46 - application.overdue_days\n DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at\n =datetime.now(), entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - bill_dict[c].get(\n 'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(\n 'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),\n expected_out_time=date.today() + timedelta(days=day_next_cycle))\n ab_test_other()\n\n\ndef c1a_dispatch_app():\n today = datetime.today().date()\n tomorrow = today + timedelta(days=1)\n c1a_apps = Application.select().where(Application.status << [\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value], 
Application.dpd1_entry >= today, Application.dpd1_entry <\n tomorrow, Application.type == ApplicationType.CASH_LOAN.value)\n all_aids = [a.id for a in c1a_apps]\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C1A.value)\n end = 0\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n is_del == 0)\n start = end\n end += int(len(all_aids) * p.app_percentage)\n aids = all_aids[start:end]\n bids = [b.id for b in bombers]\n if not bids or not aids:\n continue\n average_number = get_average_number(len(aids), len(bids))\n p_end = 0\n for i, bid in enumerate(bids):\n p_start = p_end\n p_end += average_number[i]\n b_aids = aids[p_start:p_end]\n with db.atomic():\n q = Application.update(latest_bomber=bid, status=\n ApplicationStatus.AB_TEST.value).where(Application.id <<\n b_aids).execute()\n params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,\n 'application_ids': b_aids, 'dest_bomber_id': bid}\n new_in_record(**params)\n try:\n dispatch_inserts = []\n for aid in b_aids:\n dispatch_inserts.append({'application': aid, 'bomber':\n bid, 'partner': p.id, 'status': DisAppStatus.NORMAL\n .value})\n if dispatch_inserts:\n q = DispatchApp.insert_many(dispatch_inserts).execute()\n except Exception as e:\n logging.error('c1a分件写入dispatch_app error:%s' % str(e))\n\n\n<mask token>\n\n\ndef get_cash_bomber(bids, cycle):\n cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==\n 0, Bomber.instalment != cycle)\n cash_bids = [b.id for b in cash_bombers]\n return cash_bids\n\n\n<mask token>\n\n\ndef out_and_in_record_instalment(**kwargs):\n if not kwargs.get('application_ids'):\n return\n out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(\n DispatchAppHistory.application << kwargs['application_ids'],\n DispatchAppHistory.out_at.is_null(True)).execute()\n cycle_period = {(1): '10', (2): '30', (3): '60', (4): '90'}\n period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')\n kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(kwargs['dest_bomber_id'])).alias(\n 'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.\n alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).\n alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << kwargs[\n 'application_ids'])\n application_list = list(subquery)\n for idx in range(0, len(application_list), 50):\n applications = application_list[idx:idx + 50]\n app_ids = [i.application_id for i in applications]\n overdue_bills = OverdueBill.select().where(OverdueBill.\n collection_id << app_ids)\n sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]\n bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)\n insert_args = lambad_instalment_result(bill_list, applications)\n if not insert_args:\n continue\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef c1b_dispatch_in_record(**kwargs):\n app_ids = kwargs.get('apps')\n partner_id = kwargs.get('partner_id', 'null')\n bill_dict = kwargs.get('bill_dict')\n period = kwargs.get('period')\n bomber_id = kwargs.get('bomber_id')\n if not all([app_ids, partner_id, bill_dict, period]):\n return False\n bill_dict = {str(k): 
v for k, v in bill_dict.items()}\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().\n alias('entry_at'), Application.overdue_days.alias(\n 'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.id << app_ids)\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef check_call_history(application):\n app_create_at = application.created_at + timedelta(days=4)\n if datetime.today().date() > app_create_at.date():\n call_actions = CallActions.select().where(CallActions.type == 0, \n CallActions.application == application.id, CallActions.\n created_at > datetime.now() - timedelta(days=5))\n for call in call_actions:\n if call.phone_status == PhoneStatus.CONNECTED.value:\n return True\n return False\n return True\n\n\n@action(MessageAction.BOMBER_SCAVENGER)\ndef scavenger(payload, msg_id):\n scavenger_time = -60\n scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'SCAVENGER_TIME').first()\n if scavenger and scavenger.value.isdigit():\n scavenger_time = -int(scavenger.value)\n update_auto_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value, description='scavenger').where(AutoCallList.status ==\n AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime\n .now() + timedelta(minutes=scavenger_time))\n count = update_auto_call_list.execute()\n logging.info('scavenger processed %s application', count)\n mail_box_scavenger_time = -30\n mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'MAIL_BOX_SCAVENGER_TIME').first()\n if mail_box_scavenger and mail_box_scavenger.value.isdigit():\n mail_box_scavenger_time = -int(mail_box_scavenger.value)\n update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.\n value, AutoCallList.updated_at < datetime.now() + timedelta(minutes\n =mail_box_scavenger_time))\n mail_box_count = update_mail_box_call_list.execute()\n logging.info('scavenger update mail box %s', mail_box_count)\n update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value\n ).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.\n updated_at < datetime.now() + timedelta(minutes=-30))\n ivr_result = update_auto_ivr.execute()\n logging.info('scavenger update %s ivr' % ivr_result)\n\n\n<mask token>\n\n\n@action(MessageAction.REPORT_BOMBER_COLLECTION)\ndef report_bomber_collection(payload, msg_id):\n start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)\n ).scalar()\n now = datetime.now()\n if start_date and str(start_date) == str(now)[:10]:\n return\n end_date = str(now + timedelta(days=1))[:10]\n start_date = str(now)[:10]\n dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))\n all_overdue_loan_sql1 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n GROUP BY 1;\n \"\"\"\n s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()\n d1 = OperatedDict(s_data1)\n all_overdue_loan_sql2 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n 
WHERE DATE(ba.follow_up_date) > CURDATE()\n AND ba.called_counts = 0\n GROUP BY 1;\n \"\"\"\n s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()\n d2 = OperatedDict(s_data2)\n overdue_loans_entered_into_predict_call_system_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE ba.called_counts >= 1\n GROUP BY 1;\n \"\"\"\n s_data3 = readonly_db.execute_sql(\n overdue_loans_entered_into_predict_call_system_sql).fetchall()\n d3 = OperatedDict(s_data3)\n loans_completed_sql = \"\"\"\n SELECT ba.cycle, COUNT(DISTINCT ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()\n d4 = OperatedDict(s_data4)\n connected_calls_automatic_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()\n d5 = OperatedDict(s_data5)\n connected_calls_manual_sql = \"\"\"\n SELECT bb.cycle, COUNT(bb.id)\n FROM bomber.bombing_history bb\n WHERE DATE(bb.created_at) = curdate()\n AND (bb.bomber_id < 150 OR bb.bomber_id > 200)\n GROUP BY bb.cycle;\n \"\"\"\n s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()\n d6 = OperatedDict(s_data6)\n logging.info('Directly get data from database successfully.')\n c1 = d1 - d2\n c2 = d3\n c3 = c2 / c1\n c4 = d4\n c5 = c4 / c2\n c6 = d5\n c7 = c6 / c4\n c8 = d6\n c9 = OperatedDict(get_agent())\n c10 = (c6 + c8) / c9\n try:\n c11 = average_call_duration_team(start_date, end_date)\n except AttributeError:\n c11 = {}\n lst = []\n for i in range(1, 5):\n lst.append({'apply_date': start_date, 'cycle': dct[i],\n 'all_overdue_loan': c1.get(i, 0),\n 'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),\n 'of_overdue_loans_entered_into_predict_call_system': round(c3.\n get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),\n 'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *\n 100, 1), 'connected_calls_automatic': c6.get(i, 0),\n 'connected_calls_automatic_completed_loans': round(c7.get(i, 0),\n 1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i, \n 0), 'average_calls_agent': round(c10.get(i, 0), 1),\n 'average_call_duration_team': round(c11.get(i, 0), 1)})\n ReportCollection.insert_many(lst).execute()\n logging.info('report_bomber_collection:Done!')\n\n\n<mask token>\n\n\ndef new_out_record(**kwargs):\n if not kwargs['application_ids']:\n return\n DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.\n bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.\n application << kwargs['application_ids'], DispatchAppHistory.out_at\n .is_null(True)).execute()\n if kwargs.get('month_dispatch'):\n return\n try:\n Application.update(ptp_bomber=None).where(Application.id << kwargs[\n 'application_ids']).execute()\n except Exception as e:\n logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[\n 'application_ids'], str(e)))\n\n\n<mask token>\n\n\ndef end_old_application(old_app, paid=False):\n if paid:\n if old_app.status == OldLoanStatus.WAITING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return\n if old_app.status == OldLoanStatus.PROCESSING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return old_app.application_id\n end_date = old_app.end_date\n now = datetime.now()\n if now >= max(end_date, old_app.promised_date or 
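    # Reading of the condition below (a sketch, not confirmed by the source):
    # an old loan is only marked FINISHED once both its end_date and any
    # outstanding PTP promise have passed; `promised_date or now` treats a
    # missing promise as already due.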
now):\n old_app.status = OldLoanStatus.FINISHED.value\n old_app.save()\n return old_app.application_id\n\n\n<mask token>\n\n\ndef get_before_bomber(date_time):\n begin_time = str(date_time - timedelta(days=7))\n end_time = str(date_time)\n old_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at<date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (begin_time, begin_time))\n old_data = run_one_sql(old_sql)\n new_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at> '%s' \n and created_at<'%s'\n and overdue_days=1;\n \"\"\"\n % (begin_time, end_time))\n new_data = run_one_sql(new_sql)\n dpd4_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>date_add('%s',interval 1 day) \n and created_at< date_add('%s',interval 1 day)\n and overdue_days=4;\n \"\"\"\n % (begin_time, end_time))\n dpd4_data = run_one_sql(dpd4_sql)\n dpd2_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at< date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (end_time, end_time))\n dpd2_data = run_one_sql(dpd2_sql)\n all_money = old_data + new_data\n repayment = all_money - dpd4_data - dpd2_data\n pro = 0\n if all_money:\n pro = repayment / all_money * 100\n RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,\n proportion=pro, repayment=repayment)\n\n\n<mask token>\n\n\ndef get_c1a_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=19)\n repayment_sql = (\n \"\"\"\n select \n sum(b.principal_part+b.late_fee_part) as paid_amount, \n cdt\n from \n (select \n br.principal_part, br.late_fee_part, \n date(cdt) as cdt, br.repay_at, br.application_id\n from (\n select ba.id, ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n if not repayment:\n return\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n for d in repayment:\n repay = d[0] / 1000000\n 
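            # Assumption: amounts in bomber.repayment_log are stored in
            # micro-units, so the division by 1,000,000 above puts the paid
            # amount on the same scale as RepaymentReportInto.all_money
            # before the proportion is computed.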
report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n d[1], RepaymentReportInto.cycle == Cycle.C1A.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = round(repay, 3)\n pro = repay / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_c2_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n not_contain_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_money = run_one_sql(not_contain_sql)\n begin_date = date_time - timedelta(days=37)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n not_contain_repay_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_repay = run_all_sql(not_contain_repay_sql)\n if not not_contain_money and repayment:\n return\n for i in ContainOut.values():\n if i == ContainOut.NOT_CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(\n not_contain_money, 3), proportion='0', repayment=0,\n is_first_loan=is_first_loan, 
contain_out=ContainOut.\n NOT_CONTAIN.value)\n for repay in not_contain_repay:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n elif i == ContainOut.CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(all_money, 3),\n proportion='0', repayment=0, is_first_loan=\n is_first_loan, contain_out=ContainOut.CONTAIN.value)\n for repay in repayment:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\ndef get_c3_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id, ba.c3_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=30)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c3_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c3_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = '%s'\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 4\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C3.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n if not repayment:\n return\n for repay in repayment:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n repay[1], RepaymentReportInto.cycle == Cycle.C3.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = repay_money\n pro = 0\n if report.all_money and int(report.all_money):\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n 
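                # The all_money guard above avoids a ZeroDivisionError; when
                # the entry amount is zero the report keeps its '0' default
                # proportion (stored as a string percentage).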
report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_static_bomber(begin_date):\n active_date = begin_date - timedelta(days=8)\n bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),\n BomberR.last_active_at.alias('active')).where(BomberR.\n last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])\n summary = []\n for bomber in bombers:\n summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':\n bomber.role.cycle, 'work_ind': 0})\n SummaryBomber.insert_many(summary).execute()\n\n\n<mask token>\n\n\n@time_logger\ndef get_call_and_made(end_date, begin_date, real_time_query=False):\n call_sql = (\n \"\"\"\n select \n bomber_id, \n count(case when relationship is not null then application_id end) \n as 'call_cnt', \n count(distinct case when relationship is not null then \n application_id end) as 'call_case',\n count(case when phone_status=4 then application_id end) as 'connect',\n count(distinct case when phone_status=4 then application_id end) \n as 'connect_case'\n from (\n select bomber_id,application_id,phone_status, cycle, relationship\n from bomber.call_actions ba\n where created_at>'%s' and created_at<'%s'\n and type in (0, 1)\n ) a\n group by 1\n \"\"\"\n % (begin_date, end_date))\n calls = run_all_sql(call_sql)\n if real_time_query:\n return calls\n for call in calls:\n bomber, call_cnt, case_made, connect_cnt, case_connect = call\n SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,\n call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(\n SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date\n ).execute()\n return calls\n\n\n@time_logger\ndef get_claimed_cnt(end_date, begin_date, real_time_query=False):\n table_date = begin_date - timedelta(days=30)\n claimed_sql = (\n \"\"\"\n SELECT\n COUNT( `t1`.`application_id` ) AS cnt,\n `t1`.`bomber_id` AS bomber_id \n FROM\n `dispatch_app_history` AS t1 \n WHERE\n ( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null ) \n AND ( `t1`.`bomber_id` != 1000 ) \n AND ( `t1`.`partner_id` IS null ) \n AND ( `t1`.`entry_at` > '%s' ) \n AND ( `t1`.`entry_at` < '%s' ) \n GROUP BY\n `t1`.`bomber_id`\n \"\"\"\n % (begin_date, table_date, end_date))\n claimeds = run_all_sql(claimed_sql)\n if real_time_query:\n return claimeds\n for claimed in claimeds:\n cnt, bomber_id = claimed\n SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == bomber_id).execute()\n return claimeds\n\n\ndef get_sms_data(end_data, begin_data):\n all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(\n 'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')\n ).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.\n created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())\n ).group_by(ConnectHistoryR.operator)\n for sms in all_sms:\n SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==\n begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()\n return all_sms\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_NEW)\ndef summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_active_bomber(begin_date)\n get_call_and_made(end_date, begin_date)\n get_claimed_cnt(end_date, begin_date)\n get_sms_data(end_date, 
begin_date)\n get_ptp_data(end_date, begin_date)\n get_recover_amount(end_date, begin_date)\n get_unfollowed(begin_date)\n get_unfollowed_call(begin_date)\n\n\ndef get_new_case_amount(begin_date, end_date):\n all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.\n entry_late_fee_pending + DispatchAppHistoryR.\n entry_principal_pending).alias('pending'), DispatchAppHistoryR.\n bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')\n ).where(DispatchAppHistoryR.entry_at > begin_date, \n DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.\n partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)\n for case in all_case:\n SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt\n =case.cnt).where(SummaryBomber.bomber_id == case.bomber_id, \n SummaryBomber.time == begin_date).execute()\n return all_case\n\n\n<mask token>\n\n\n@time_logger\ndef get_new_case_call(begin_date, end_date, real_query_time=False):\n sql = (\n \"\"\"\n SELECT\n bd.bomber_id,\n count( DISTINCT bd.application_id )\n FROM\n bomber.dispatch_app_history bd\n INNER JOIN bomber.call_actions bc \n ON bd.application_id = bc.application_id \n AND bd.bomber_id = bc.bomber_id \n AND date( bd.entry_at ) = date( bc.created_at ) \n WHERE\n entry_at > '%s' \n AND entry_at < '%s' \n AND partner_id IS NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n new_case_calls = run_all_sql(sql)\n if real_query_time:\n return new_case_calls\n for call in new_case_calls:\n SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber\n .bomber_id == call[0], SummaryBomber.time == begin_date).execute()\n return new_case_calls\n\n\n@time_logger\ndef get_calltime_avg(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction,\n count( 1 ) AS auto_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' ' \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n AND an.recording is not null\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]][0] += data[1]\n result[data[0]][1] += data[2]\n continue\n result[data[0]] = [data[1], data[2]]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=\n value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == key).execute()\n return result\n\n\ndef get_no_calltime_avg(begin_date, end_date):\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND 
an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND (an.status!='ANSWERED' or an.recording is null) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n for data in manuals:\n SummaryBomber.update(calltime_no_case_sum=data[1],\n calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /\n data[2] if data[2] else 0).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == data[0]).execute()\n\n\n@time_logger\ndef get_calltime_sum(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' '\n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == key).execute()\n return result\n\n\n<mask token>\n\n\n@action(MessageAction.UPDATE_SUMMARY_NEW)\ndef update_summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'UPDATE_SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_new_case_amount(begin_date, end_date)\n get_kp_cleared(begin_date, end_date)\n get_kp_today(begin_date, end_date)\n get_ptp_cnt(begin_date, end_date)\n get_ptp_call_cnt(begin_date, end_date)\n get_new_case_cleared(begin_date, end_date)\n get_new_case_call(begin_date, end_date)\n get_calltime_avg(begin_date, end_date)\n get_no_calltime_avg(begin_date, end_date)\n get_calltime_sum(begin_date, end_date)\n\n\ndef get_cycle_claimed(begin_date, end_date):\n sql = (\n \"\"\"\n select cycle,count(1)\n from bomber.application where cycle in (1,2,3,4)\n and (finished_at is null or (finished_at>'%s'))\n and created_at>'2018-09-01'\n group by 1\n \"\"\"\n % begin_date)\n result = run_all_sql(sql)\n return result\n\n\n@time_logger\ndef cycle_new_case(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( ba.id ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.created_at ) = bo.which_day_overdue \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = 
bo.application_id \n AND date( ba.c1b_entry ) = bo.which_day_overdue\n WHERE\n c1b_entry > '%s' \n AND c1b_entry < '%s' \n UNION\n SELECT\n 3 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c2_entry ) = bo.which_day_overdue \n WHERE\n c2_entry > '%s' \n AND c2_entry < '%s' \n UNION\n SELECT\n 4 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c3_entry ) = bo.which_day_overdue\n WHERE\n c3_entry > '%s' \n AND c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n all_datas = run_all_sql(sql)\n if real_time_query:\n return all_datas\n for data in all_datas:\n SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()\n return all_datas\n\n\n@time_logger\ndef get_cycle_new_case_call(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.created_at ) = date( bc.created_at ) \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c1b_entry ) = date( bc.created_at ) \n WHERE\n ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s'\n UNION\n SELECT\n 3 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c2_entry ) = date( bc.created_at ) \n WHERE\n ba.c2_entry > '%s' \n AND ba.c2_entry < '%s'\n UNION\n SELECT\n 4 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c3_entry ) = date( bc.created_at ) \n WHERE\n ba.c3_entry > '%s' \n AND ba.c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n cycle_datas = run_all_sql(sql)\n if real_time_query:\n return cycle_datas\n for data in cycle_datas:\n SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber\n .time == begin_date, SummaryBomber.cycle == data[0], \n SummaryBomber.bomber_id == data[0]).execute()\n return cycle_datas\n\n\ndef get_cycle_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n '1' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.created_at ) = date( br.repay_at ) \n WHERE ba.created_at > '%s' \n AND ba.created_at < '%s' \n GROUP BY 1, 2 ) a \n UNION\n SELECT \n '2' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) 
\n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n GROUP BY 1, 2) a\n \"\"\"\n % (begin_date, end_date, begin_date, end_date))\n cycle_cleared = run_all_sql(sql)\n for i in cycle_cleared:\n SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber\n .cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.\n time == begin_date).execute()\n\n\n<mask token>\n\n\ndef get_change_bomber():\n cycle_role_map = {(5): Cycle.C1B.value, (6): Cycle.C2.value, (8): Cycle\n .C3.value}\n result = {}\n bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,\n BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=\n BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.\n created_at) == date.today(), BomberLog.role_id << list(\n cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.\n instalment == 0).dicts()\n for b_log in bomber_logs:\n cycle = cycle_role_map.get(b_log['role_id'])\n group_id = b_log['group_id']\n if cycle in result:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}\n else:\n result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}}\n if b_log['operation'] == 0:\n result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])\n if result:\n bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map\n .keys())), Bomber.is_del == 0, Bomber.instalment == 0)\n for b in bombers:\n cycle_result = result.get(cycle_role_map[b.role_id], {})\n role_result = cycle_result.get(b.group_id)\n if not role_result:\n continue\n role_result['new_ids'].append(b.id)\n resutl_list = []\n for cycle, group_dict in result.items():\n resutl_list.extend(list(group_dict.values()))\n return resutl_list\n return []\n\n\n<mask token>\n\n\ndef get_surplus_application(new_ids, del_ids, average_nums, classified_apps):\n surplus_apps = []\n for del_id in del_ids:\n del_res = classified_apps.get(del_id, {})\n p_list = del_res.get('p_list', [])\n np_list = del_res.get('np_list', [])\n del_res['need_num'] = -(len(p_list) + len(np_list))\n del_res['to_list'] = np_list + p_list\n surplus_apps.extend(p_list)\n surplus_apps.extend(np_list)\n for index, bid in enumerate(new_ids):\n average = average_nums[index]\n bomber_app = classified_apps.get(bid)\n if not bomber_app:\n bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid\n ).first()\n bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':\n [], 'np_num': 0, 'need_num': average, 'partner_id': bomber.\n partner_id if bomber else ''}\n classified_apps[bid] = bomber_app\n else:\n p_num = len(bomber_app['p_list'])\n np_num = len(bomber_app['np_list'])\n if p_num > average:\n bomber_app['need_num'] = -np_num\n else:\n bomber_app['need_num'] = average - (p_num + np_num)\n bomber_app['p_num'] = p_num\n bomber_app['np_num'] = np_num\n if bomber_app['need_num'] < 0:\n random.shuffle(bomber_app['np_list'])\n res_over = bomber_app['np_list'][:-bomber_app['need_num']]\n bomber_app['to_list'] = res_over\n surplus_apps.extend(res_over)\n classified_apps_list = sorted(classified_apps.values(), key=lambda x: x\n ['need_num'], reverse=True)\n return surplus_apps, classified_apps_list\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)\ndef bomber_dispatch_applications(payload, msg_id):\n change_bombers = get_change_bomber()\n instalment_change_bombers = get_instalment_change_bomber()\n params = {ApplicationType.CASH_LOAN.value: change_bombers,\n ApplicationType.CASH_LOAN_STAGING.value: 
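        # staging (instalment) apps carry their own changed-bomber set so the
        # two application types are re-dispatched independently below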
instalment_change_bombers}\n for type, bombers in params.items():\n change_bomber_dispatch_apps(change_bombers=bombers, type=type)\n\n\n<mask token>\n\n\ndef get_summary_daily_time():\n mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')\n mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')\n now_date = datetime.now()\n now_date_time = now_date.time()\n today_str = str(now_date.date())\n if now_date_time < mid_time_t1.time():\n yes_date = now_date - timedelta(days=1)\n yes_date_str = str(yes_date.date())\n begin_str = yes_date_str + ' 17:20:00'\n end_str = today_str + ' 00:00:00'\n elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():\n begin_str = today_str + ' 00:00:00'\n end_str = today_str + ' 12:40:00'\n else:\n begin_str = today_str + ' 12:40:00'\n end_str = today_str + ' 17:20:00'\n begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')\n end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')\n summary_datetime = now_date - timedelta(minutes=30)\n summary_date = summary_datetime.date()\n return begin_time, end_time, summary_date\n\n\n@action(MessageAction.SUMMARY_DAILY)\ndef summary_daily_data(payload, msg_id):\n begin_time, end_time, summary_date = get_summary_daily_time()\n call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.\n bomber_id, CallActionsR.application_id, CallActionsR.promised_date,\n CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(\n CallActionsR.created_at >= begin_time, CallActionsR.created_at <\n end_time, CallActionsR.type << (0, 1))\n summary_dailys = {}\n for call in call_actions:\n if call.bomber_id not in summary_dailys:\n summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.\n bomber_id, 'summary_date': str(summary_date)}\n if call.name and call.number:\n summary_dailys[call.bomber_id]['call_cnt'] += 1\n if call.promised_date:\n summary_dailys[call.bomber_id]['ptp_cnt'] += 1\n C1_sql = (\n \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,a.cycle\n from \n (select a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,2,4,5)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\"\n % (begin_time, end_time))\n C1_repayment = run_all_sql(C1_sql)\n other_sql = (\n \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,cycle\n from (\n select application_id,current_bomber_id,pay_amount,repay_at,cycle\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at,\n br.cycle\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (3,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n group by 1,4) b\n group by 1\n \"\"\"\n % (begin_time, end_time))\n other_repayment = run_all_sql(other_sql)\n all_repayment = C1_repayment + other_repayment\n for res in all_repayment:\n bomber_id, pay_amount, 
cycle = res\n if bomber_id in summary_dailys:\n summary_dailys[bomber_id]['repayment'] += pay_amount\n else:\n summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': cycle, 'repayment': pay_amount, 'bomber_id':\n bomber_id, 'summary_date': str(summary_date)}\n insert_values = list(summary_dailys.values())\n if insert_values:\n SummaryDaily.insert_many(insert_values).execute()\n\n\n<mask token>\n\n\ndef month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):\n apps = list(apps)\n np_apps = list(np_apps)\n random.shuffle(np_apps)\n apps_len = len(apps)\n np_apps_len = len(np_apps)\n end = 0\n all_app_precentage = 0\n partners = Partner.select().where(Partner.cycle == cycle, Partner.\n status == PartnerStatus.NORMAL.value)\n for p in partners:\n all_app_precentage += p.app_percentage\n for partner in partners:\n bombers = Bomber.select().where(Bomber.partner == partner.id, \n Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.\n value)\n bids = {b.id: b for b in bombers}\n if len(bids) == 0:\n logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))\n continue\n start = end\n if np_apps_len >= int(apps_len * all_app_precentage):\n end = start + int(apps_len * partner.app_percentage)\n else:\n end = start + int(np_apps_len * partner.app_percentage /\n all_app_precentage)\n partner_app = np_apps[start:end]\n dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)\n np_apps = np_apps[end:]\n return np_apps\n\n\ndef month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):\n sys_cycle = {(1): 'AB_TEST_C1A', (2): 'AB_TEST_C1B', (3): 'AB_TEST_C2',\n (4): 'AB_TEST_C3'}\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0\n )\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n bombers = bombers.where(Bomber.instalment == 0)\n bids = {b.id: b for b in bombers}\n if cycle == Cycle.C1A.value:\n np_ids = [a['id'] for a in np_apps]\n np = Application.update(status=ApplicationStatus.PROCESSING.value,\n ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids\n ).execute()\n bomber_app_logs = app_logs.get(cycle, {})\n out_param = {'application_ids': bomber_app_logs.get('to_ids', []),\n 'month_dispatch': 1, 'src_bomber_id': cycle}\n new_out_record(**out_param)\n in_param = {'cycle': cycle, 'application_ids': np_ids,\n 'dest_bomber_id': cycle}\n new_in_record(**in_param)\n bomber_app_logs['need_num'] = len(np_apps)\n bomber_app_logs['form_ids'] = np_ids\n bomber_app_logs['status'] = 1\n else:\n dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)\n dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)\n\n\ndef dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,\n type=ApplicationType.CASH_LOAN.value):\n apps = list(apps)\n random.shuffle(apps)\n bids_list = list(bids.keys())\n if len(bids_list) <= 0:\n logging.info('get_dispatch_app_to_bomber no bids')\n return\n average_num = get_average_number(len(apps), len(bids_list))\n bomber_end = 0\n with db.atomic():\n for index, bid in enumerate(bids_list):\n current_bomber = bids.get(bid)\n bomber_app_logs = app_logs.get(bid, {})\n bomber_start = bomber_end\n bomber_end = bomber_start + average_num[index]\n bomber_apps = apps[bomber_start:bomber_end]\n from_p, from_np, from_ids, status = [], [], [], 0\n for ba in bomber_apps:\n promised_date = ba.get('promised_date')\n from_ids.append(ba['id'])\n if promised_date and 
promised_date.date() >= date.today():\n from_p.append(ba['id'])\n else:\n from_np.append(ba['id'])\n app_status = ApplicationStatus.AB_TEST.value\n if (cycle == Cycle.C1A.value and not out_partner and type ==\n ApplicationType.CASH_LOAN.value):\n app_status = ApplicationStatus.PROCESSING.value\n if from_p:\n p = Application.update(ptp_bomber=bid, latest_bomber=bid,\n status=app_status).where(Application.id << from_p).execute(\n )\n p_ids = bomber_app_logs.get('p_ids', []) + from_p\n bomber_app_logs['p_ids'] = p_ids\n if from_np:\n np = Application.update(latest_bomber=bid, ptp_bomber=None,\n status=ApplicationStatus.AB_TEST.value).where(\n Application.id << from_np).execute()\n np_ids = bomber_app_logs.get('np_ids', []) + from_np\n bomber_app_logs['np_ids'] = np_ids\n in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.\n partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}\n if type == ApplicationType.CASH_LOAN.value:\n out_param = {'src_bomber_id': bid, 'application_ids':\n bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}\n new_out_record(**out_param)\n new_in_record(**in_param)\n else:\n out_and_in_record_instalment(**in_param)\n bomber_app_logs['status'] = 1\n need_num = bomber_app_logs.get('need_num', 0) + average_num[index]\n bomber_app_logs['need_num'] = need_num\n all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids\n bomber_app_logs['form_ids'] = all_form_ids\n if not out_partner:\n continue\n try:\n DispatchApp.delete().where(DispatchApp.application.in_(\n from_ids)).execute()\n dispatch_ins = [{'application': id, 'partner':\n current_bomber.partner_id, 'bomber': bid, 'status':\n DisAppStatus.NORMAL.value} for id in from_ids]\n DispatchApp.insert_many(dispatch_ins).execute()\n except Exception as e:\n logging.info(\n 'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str\n (e), bid, from_ids))\n\n\n<mask token>\n\n\ndef calc_entry_time(overdue_days):\n app_entry_time = {}\n overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],\n 'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}\n for key, value in overdue_entry.items():\n if value[0] <= overdue_days <= value[1]:\n app_entry_time[key] = datetime.now()\n else:\n app_entry_time[key] = None\n return app_entry_time\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_BOMBER_OVERDUE)\ndef summary_bomber_overdue_everyday(payload, msg_id):\n cycle_list = Cycle.values()\n which_day = date.today()\n for cycle in cycle_list:\n apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,\n ApplicationR.ptp_bomber, ApplicationR.overdue_days,\n ApplicationR.promised_date, ApplicationR.follow_up_date,\n ApplicationR.external_id, OverdueBillR.status, OverdueBillR.\n periods, OverdueBillR.sub_bill_id).join(OverdueBillR,\n JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id\n ).where(ApplicationR.status != ApplicationStatus.REPAID.value, \n ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()\n bomber_overdue_list = []\n for app in apps:\n status = app.get('status')\n if status == ApplicationStatus.REPAID.value:\n continue\n ptp_bomber = app.get('ptp_bomber')\n promised_date = app.get('promised_date')\n follow_up_date = app.get('follow_up_date')\n if not promised_date or promised_date.date() < date.today():\n ptp_bomber = promised_date = None\n if not follow_up_date or follow_up_date.date() < date.today():\n follow_up_date = None\n overdue_dict = {'collection_id': app.get('id'), 'external_id':\n app.get('external_id'), 'sub_bill_id': app.get(\n 
'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app\n .get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':\n ptp_bomber, 'promised_date': promised_date,\n 'follow_up_date': follow_up_date, 'which_day': which_day,\n 'overdue_days': app.get('overdue_days')}\n bomber_overdue_list.append(overdue_dict)\n try:\n if bomber_overdue_list:\n with db.atomic():\n for index in range(0, len(bomber_overdue_list), 1000):\n insert_list = bomber_overdue_list[index:index + 1000]\n BomberOverdue.insert_many(insert_list).execute()\n except Exception as e:\n logging.info(\n 'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %\n (cycle, str(which_day), str(e)))\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)\ndef today_ptp_auto_call_switch(payload, msg_id):\n today = datetime.today().date()\n next_day = today + timedelta(days=1)\n apps = ApplicationR.select(ApplicationR.latest_bomber).where(\n ApplicationR.promised_date < next_day, ApplicationR.promised_date >=\n today, ApplicationR.promised_date.is_null(False), ApplicationR.\n status != ApplicationStatus.REPAID.value, ApplicationR.cycle <\n Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(\n ApplicationR.latest_bomber)\n bids = [a.latest_bomber_id for a in apps]\n if not bids:\n return\n q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(\n BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids\n ).execute()\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)\ndef update_today_switch_every_day(payload, msg_id):\n q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(\n BomberPtp.auto_ext.is_null(False)).execute()\n\n\n<mask token>\n",
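# Hedged sketch: the masked helpers run_one_sql / run_all_sql are called
# throughout the reporting functions above, but their bodies are not shown.
# Assuming they wrap the same readonly_db handle used in
# report_bomber_collection (the names, NULL handling, and the module-level
# readonly_db / Decimal imports are assumptions, not confirmed by the source):
def run_one_sql(sql):
    # Fetch a single aggregate value; treat a NULL aggregate as zero so
    # caller arithmetic (e.g. all_money + new_data) stays well-defined.
    row = readonly_db.execute_sql(sql).fetchone()
    return row[0] if row and row[0] is not None else Decimal(0)


def run_all_sql(sql):
    # Fetch every row as a tuple, matching how callers index the results
    # (e.g. `bomber_id, pay_amount, cycle = res`).
    return readonly_db.execute_sql(sql).fetchall()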
"step-2": "<mask token>\n\n\n@action(MessageAction.BOMBER_HEALTH_CHECK)\ndef health_check(payload, msg_id):\n pass\n\n\n<mask token>\n\n\ndef dpd1_process(lst):\n \"\"\"已废弃的方法\"\"\"\n if not lst:\n return\n for key, l in lst.items():\n rule = getattr(BeforeInBomber, key).value\n query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)\n ).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(\n rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.\n call_success()))\n success_set = {i.loanid for i in query}\n failed_list = list(set(l) - success_set)\n post_params = {'$and': rule.get('$and'), 'app_list': failed_list}\n resp = Hyperloop().post('/bomber/score/verify', json=post_params)\n if not resp.ok:\n logging.error('hyperloop score verification failed: %s, %s',\n str(resp.status_code), str(resp.text))\n logging.error('hyperloop score verification failed: %s', str(\n post_params))\n continue\n logging.debug('hyperloop score verification success: %s', resp.content)\n resp_json = resp.json()\n app_list = resp_json['data']\n if not app_list:\n continue\n for item in app_list:\n if random.randint(0, 5) == 1:\n send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':\n int(item)})\n\n\n@action(MessageAction.GET_IVR)\ndef get_ivr(payload, msg_id):\n logging.warning('start get_ivr')\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'DPD1-3_INTO_IVR').first()\n now = date.today()\n if sys_config and sys_config.value:\n start = now - timedelta(days=3)\n else:\n start = now\n end = now + timedelta(days=4)\n item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()\n if not item:\n AutoIVR.delete().execute()\n current_page = 0\n elif item.current_page >= item.total_page:\n return\n else:\n current_page = item.current_page\n auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,\n 'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,\n 'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':\n 10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,\n 'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,\n 'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,\n 'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,\n 'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,\n 'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,\n 'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,\n 'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,\n 'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,\n 'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,\n 'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':\n 101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,\n 'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,\n 'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,\n 'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}\n current_page += 1\n with db.atomic() as transaction:\n while True:\n bill_service = BillService()\n ivr_action = bill_service.ivr_pages(page=current_page,\n page_size=500, start_time=utc_datetime(str(start)),\n end_time=utc_datetime(str(end)))\n result = ivr_action['result']\n page_size = int(ivr_action.get('page_size', 0))\n total_page = int(ivr_action.get('total_page', 0))\n insert_args = []\n for a in result:\n due_at = no_utc_datetime(a['due_at'])\n days = (due_at.date() - now).days\n if days == 2:\n continue\n if days > 0:\n time = str(days)\n else:\n time = str(days).replace('-', 
'PDP')\n key = a['app_name'] + str(a['su']) + time\n group = auto_ivr.get(key)\n user_id = a['user_id']\n try:\n user_resp = AccountService().get_user(path_params={\n 'user_id': user_id})\n if str(user_resp['mobile_no']) == str(a['user_mobile_no']):\n numbers = a['user_mobile_no']\n else:\n numbers = a['user_mobile_no'] + ',' + user_resp.get(\n 'mobile_no')\n except:\n logging.error('Request Account Service Error.')\n numbers = a['user_mobile_no']\n insert_args.append({'application_id': a['id'], 'numbers':\n numbers, 'group': group, 'user_id': user_id})\n AutoIVR.insert_many(insert_args).execute()\n if current_page == 1:\n IVRActionLog.create(total_page=total_page, proc_date=now,\n page_size=page_size, current_page=current_page)\n item = IVRActionLog.get(IVRActionLog.proc_date == now)\n else:\n item.current_page = current_page\n item.page_size = page_size\n item.total_page = total_page\n item.save()\n transaction.commit()\n current_page += 1\n if current_page > int(total_page):\n break\n if sys_config and sys_config.value:\n try:\n classfiy_dpd_ptp_apps()\n except Exception as e:\n logging.error('dpd1-3_test_error:%s' % str(e))\n\n\ndef ivr_t2_test():\n t2_groups = [39, 40, 41, 42, 43, 44]\n ivr_test_proportion = 0.2\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'IVR_TEST_PROPORTION').first()\n if sys_config and sys_config.value:\n ivr_test_proportion = float(sys_config.value)\n t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.\n status == AutoIVRStatus.AVAILABLE.value)\n t2_dict = defaultdict(list)\n for ivr in t2_ivrs:\n t2_dict[ivr.group].append(ivr.id)\n test_ivr_ids = []\n for group, ivr_ids in t2_dict.items():\n number = ceil(len(ivr_ids) * ivr_test_proportion)\n test_ivr_ids += ivr_ids[:number]\n if not test_ivr_ids:\n return\n q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.\n group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.APP_MERGE)\n@deprecated(version='1.0', reason='This function will be removed soon')\ndef app_merge(payload, msg_id):\n sql = \"\"\"\n select *\n from (\n select a.id as id\n from dashboard.application as a\n inner join repayment.bill2 as b on b.external_id = a.id\n where not exists (\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.app = 'DanaCepat'\n and a.is_first_loan = 1\n and a.apply_at < '2018-08-23 20:50:00'\n and b.overdue_days between 1 and 3\n and b.status != 2) result\n where not exists (\n select 1\n from bomber.application as a\n where a.cycle = 1\n and a.status = 4\n and a.id = result.id\n )\n \"\"\"\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n new_data = cursor.fetchall()\n cursor.close()\n if new_data:\n bomber = [103, 104]\n for d in new_data:\n app_id = {'id': d[0]}\n application_overdue(app_id, None)\n Application.update(status=ApplicationStatus.AB_TEST.value,\n latest_bomber=random.choice(bomber), ptp_bomber=None).where(\n Application.id == d[0]).execute()\n logging.warning('add new app success')\n ptp = date.today() - timedelta(days=1)\n del_sql = (\n \"\"\"\n select a.id\n from bomber.application as a\n where exists(\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.cycle = 1\n and a.status = 4\n and (a.promised_date is null or a.promised_date < \"%s\")\n \"\"\"\n % ptp)\n cursor = readonly_db.get_cursor()\n cursor.execute(del_sql)\n del_date = 
cursor.fetchall()\n cursor.close()\n if del_date:\n return\n ids = list()\n for d in del_date:\n ids.append(d[0])\n Application.update(status=ApplicationStatus.UNCLAIMED.value,\n latest_bomber=None).where(Application.id << ids).execute()\n\n\n@action(MessageAction.APPLICATION_BOMBER)\ndef application_overdue(payload, msg_id):\n application_id = payload['id']\n sub_bill_id = payload['bill_sub_id']\n local_app = Application.select().where(Application.external_id ==\n application_id).order_by(Application.finished_at).first()\n if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:\n logging.info('application %s overdue, already exists', application_id)\n add_contact(local_app)\n return\n if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:\n overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==\n sub_bill_id, OverdueBillR.external_id == application_id)\n if overdue_bill.exists():\n logging.info(\n 'application %s,sub_bill_id %s overdue, already exists' % (\n application_id, sub_bill_id))\n return\n try:\n sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])\n sub_bill = sub_bill[0]\n except Exception:\n logging.error(\n 'application %s overdue, get sub_bill info failed:Request To repayment Error'\n , application_id)\n return\n if sub_bill['status'] == 2:\n logging.error('application %s overdue, but bills already cleared',\n application_id)\n return\n overdue_days = sub_bill.get('overdue_days', 0)\n if overdue_days == 0:\n logging.info('application {} no overdue'.format(str(application_id)))\n return\n gold_eye = GoldenEye().get('/applications/%s' % application_id)\n if not gold_eye.ok:\n logging.error('get application %s failed: Request to GoldenEye.',\n application_id)\n return\n gold_app = gold_eye.json().get('data')\n user_id = gold_app['user_id']\n apply_history = Dashboard().get('/users/%s/apply-history' % user_id)\n if not apply_history.ok:\n logging.error(\n 'get user %s apply history failed: Request to Dashboard Failed.',\n user_id)\n return\n history = apply_history.json().get('data')\n loan_success_times = len([(1) for i in history if i['status'] in [80, \n 90, 100, 70] and i['id'] != gold_app['id']])\n id = application_id\n type = ApplicationType.CASH_LOAN.value\n bill_id = sub_bill.get('bill_id')\n amount = sub_bill.get('amount')\n amount_net = sub_bill.get('amount_net')\n interest_rate = sub_bill.get('interest_rate')\n overdue_days = sub_bill.get('overdue_days')\n origin_due_at = sub_bill.get('origin_due_at')\n sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,\n 'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),\n 'overdue_days': overdue_days, 'origin_due_at': origin_due_at,\n 'amount': amount, 'amount_net': amount_net, 'interest_rate':\n interest_rate, 'external_id': application_id}\n if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:\n if local_app and local_app.status != ApplicationStatus.REPAID.value:\n sub_overdue_bill['collection_id'] = local_app.id\n local_app.amount += amount\n local_app.amount_net += amount_net\n local_app.save()\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('application %s,sub_bill_id:%s overdue created' %\n (application_id, sub_bill_id))\n return\n else:\n id = idg()\n type = ApplicationType.CASH_LOAN_STAGING.value\n sub_overdue_bill['collection_id'] = id\n ptp_info = BombingHistory.filter(BombingHistory.application == id).first()\n promised_amount = ptp_info and ptp_info.promised_amount\n promised_date = ptp_info 
and ptp_info.promised_date\n application = Application.create(id=id, user_id=gold_app['user_id'],\n user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[\n 'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],\n contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(\n 'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=\n birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(\n gold_app.get('id_ektp')), profile_province=(gold_app.get(\n 'profile_province') or {}).get('name'), profile_city=(gold_app.get(\n 'profile_city') or {}).get('name'), profile_district=(gold_app.get(\n 'profile_district') or {}).get('name'), profile_residence_time=\n gold_app.get('profile_residence_time'), profile_residence_type=\n gold_app.get('profile_residence_type'), profile_address=gold_app.\n get('profile_address'), profile_education=gold_app.get(\n 'profile_education'), profile_college=(gold_app.get(\n 'profile_college') or {}).get('name'), job_name=gold_app.get(\n 'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get\n ('job_bpjs'), job_user_email=gold_app.get('job_user_email'),\n job_type=gold_app.get('job_type'), job_industry=gold_app.get(\n 'job_industry'), job_department=gold_app.get('job_department'),\n job_province=(gold_app.get('job_province') or {}).get('name'),\n job_city=(gold_app.get('job_city') or {}).get('name'), job_district\n =(gold_app.get('job_district') or {}).get('name'), job_address=\n gold_app.get('job_address'), amount=amount, amount_net=amount_net,\n interest_rate=interest_rate, term=gold_app.get('term'),\n origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=\n sub_bill.get('repay_at'), loan_success_times=loan_success_times,\n arrived_at=datetime.now(), follow_up_date=datetime.now(),\n promised_amount=promised_amount, promised_date=promised_date,\n external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=\n datetime.now())\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('overdue application %s created', application_id)\n Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,\n status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)\n add_contact(application)\n\n\ndef add_contact(application):\n logging.info('start add contact for application: %s', application.id)\n contacts = Contact.filter(Contact.user_id == application.user_id)\n existing_numbers = {contact.number for contact in contacts}\n insert_contacts = list()\n mon_insert_contact = {}\n user_mobile_no = number_strip(application.user_mobile_no)\n if user_mobile_no and user_mobile_no not in existing_numbers:\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': user_mobile_no, 'relationship':\n Relationship.APPLICANT.value, 'source': 'apply info',\n 'real_relationship': Relationship.APPLICANT.value})\n existing_numbers.add(number_strip(application.user_mobile_no))\n extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id\n )\n if not extra_phone.ok:\n extra_phone = []\n logging.error('get user %s extra contacts failed', application.user_id)\n else:\n extra_phone = extra_phone.json()['data']\n if extra_phone:\n for i in extra_phone:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': number, 'relationship':\n Relationship.APPLICANT.value, 'source': 'extra phone',\n 
'real_relationship': Relationship.APPLICANT.value})\n key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n ec_contact = []\n contact = json.loads(application.contact or '[]')\n for i in contact:\n if number_strip(i['mobile_no']\n ) not in existing_numbers and number_strip(i['mobile_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['mobile_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['mobile_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['mobile_no']))\n if i['type'] != 1:\n continue\n if number_strip(i['tel_no']) not in existing_numbers and number_strip(i\n ['tel_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['tel_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['tel_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['tel_no']))\n if ec_contact:\n Contact.insert_many(ec_contact).execute()\n if all((application.job_tel, number_strip(application.job_tel), \n number_strip(application.job_tel) not in existing_numbers)):\n insert_contacts.append({'user_id': application.user_id, 'name':\n None, 'number': number_strip(application.job_tel),\n 'relationship': Relationship.COMPANY.value, 'source':\n 'basic info job_tel', 'real_relationship': Relationship.COMPANY\n .value})\n key = user_mobile_no, number_strip(application.job_tel\n ), ContactType.C_BASIC_INFO_JOB_TEL.value\n mon_insert_contact[key] = 1, 0, None\n existing_numbers.add(number_strip(application.job_tel))\n sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %\n application.external_id)\n if not sms_contacts.ok:\n sms_contacts = []\n logging.info('get user %s sms contacts failed', application.external_id\n )\n else:\n sms_contacts = sms_contacts.json()['data']\n if sms_contacts:\n for i in sms_contacts:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.SUGGESTED.value, 'source': 'sms contacts',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value\n mon_insert_contact[key] = 1, 0, i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n cf = GoldenEye().get('/applications/%s/call/frequency' % application.\n external_id)\n if not cf.ok:\n call_frequency = []\n logging.error('get application %s call frequency error',\n application.external_id)\n else:\n call_frequency = cf.json()['data']\n insert_contacts = []\n fm = GoldenEye().get('/applications/%s/contact/family-member' %\n application.external_id)\n if not fm.ok:\n family = []\n logging.error('get application %s family-member info error',\n application.external_id)\n else:\n family = fm.json()['data']\n if family:\n for i in family:\n if not i.get('number'):\n 
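                # (descriptive note) family-member records without a usable
                # number are only logged and skipped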
logging.info('family member %s' % str(i))\n continue\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n logging.info('family members: %s' % str(i))\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.FAMILY.value, 'source': FamilyContactType.\n CALLEC.value, 'total_count': i.get('total_count', 1),\n 'total_duration': i.get('total_duration', 0),\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number, ContactType.F_CALL_EC.value\n mon_insert_contact[key] = i.get('total_count', 1), i.get(\n 'total_duration', 0), i['name'][:128]\n existing_numbers.add(number)\n mon_update_contact = {}\n if call_frequency:\n with db.atomic():\n count = 1\n for i in call_frequency:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n Contact.update(total_count=i['total_count'],\n total_duration=i['total_duration']).where(Contact.\n number == number, Contact.user_id == application.\n user_id)\n key = user_mobile_no, number\n mon_update_contact[key] = i['total_count'], i[\n 'total_duration']\n continue\n if count < 6:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.FAMILY.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': FamilyContactType.\n CALLTOP5.value, 'real_relationship': Relationship.\n FAMILY.value})\n count += 1\n key = user_mobile_no, number, ContactType.F_CALL_TOP5.value\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n else:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.SUGGESTED.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': 'call frequency',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = (user_mobile_no, number, ContactType.\n S_CALL_FREQUENCY.value)\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n next_apply_list = AccountService().add_contact(application.user_id)\n for next_apply in next_apply_list:\n number = number_strip(str(next_apply))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n SUGGESTED.value, source='online profile phone',\n real_relationship=Relationship.SUGGESTED.value)\n key = (user_mobile_no, number, ContactType.\n S_ONLINE_PROFILE_PHONE.value)\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %\n application.user_id)\n if not next_applicant.ok:\n next_applicant = []\n logging.error('get user %s dual_contact contacts failed' %\n application.user_id)\n else:\n next_applicant = next_applicant.json()['data']\n if next_applicant:\n for i in next_applicant:\n number = number_strip(str(i))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=\n application.user_name, number=number, relationship=\n Relationship.APPLICANT.value, source='apply info',\n real_relationship=Relationship.APPLICANT.value)\n key = user_mobile_no, number, 
ContactType.A_APPLY_INFO.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n numbers = []\n try:\n numbers = AccountService().ktp_number(path_params={'user_id':\n application.user_id})\n except Exception as e:\n logging.info('request ktp numbers failed %s' % str(e))\n for n in numbers:\n number = number_strip(str(n))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n APPLICANT.value, source='ktp number', real_relationship=\n Relationship.APPLICANT.value)\n key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n try:\n ecs = GoldenEye().get('/applications/%s/contact/ec' % application.\n external_id)\n except Exception as e:\n logging.info('request ec-member error: %s' % str(e))\n try:\n if not ecs.ok:\n ec = []\n logging.info('get application %s ec-member info error',\n application.external_id)\n else:\n ec = ecs.json()['data']\n if ec:\n for e in ec:\n number = number_strip(e['numbers'])[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=e[\n 'name'][:128], number=number, relationship=\n Relationship.FAMILY.value, source=FamilyContactType\n .CONTACTEC.value, real_relationship=Relationship.\n FAMILY.value)\n key = (user_mobile_no, number, ContactType.F_CONTACT_EC\n .value)\n mon_insert_contact[key] = 1, 0, e['name'][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add ec_member error:%s' % str(e))\n try:\n mn = GoldenEye().get('/applications/%s/contact/my_number' %\n application.external_id)\n except Exception as e:\n logging.info('request my_number error: %s' % str(e))\n try:\n if not mn.ok:\n my = []\n logging.info('get application %s my_number info error',\n application.external_id)\n else:\n my = mn.json()['data']\n if my:\n for m in my:\n number = number_strip(m)[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=my[m][\n :128], number=number, relationship=Relationship.\n SUGGESTED.value, source='my number',\n real_relationship=Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_MY_NUMBER.value\n mon_insert_contact[key] = 1, 0, my[m][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add my_member error:%s' % str(e))\n try:\n cn = GoldenEye().get('/applications/%s/contact/company-number' %\n application.external_id)\n except Exception as e:\n logging.info('request company-number error: %s' % str(e))\n try:\n if not cn.ok:\n cn = []\n logging.info('get application %s company_number info error',\n application.external_id)\n else:\n cn = cn.json()['data']\n if cn:\n for c in cn:\n number = c\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=cn[c][\n :128], number=number, relationship=Relationship.\n COMPANY.value, source='company', real_relationship=\n Relationship.COMPANY.value)\n key = user_mobile_no, number, ContactType.C_COMPANY.value\n mon_insert_contact[key] = 1, 0, cn[c][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add company_member error:%s' % 
str(e))\n try:\n ol = AccountService().other_login_contact(userId=application.user_id)\n except Exception as e:\n logging.error('request other_login error: %s' % e)\n ol = {}\n try:\n for o in ol:\n number = number_strip(o)\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=ol[o][:128\n ], number=number, relationship=Relationship.SUGGESTED.\n value, source='other_login', real_relationship=\n Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value\n mon_insert_contact[key] = 1, 0, ol[o][:128]\n except Exception as e:\n logging.error('add other_login number error:%s' % e)\n logging.info('add contact for application %s finished', application.id)\n if mon_insert_contact or mon_update_contact:\n send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {\n 'user_mobile_no': user_mobile_no, 'insert_contact': str(\n mon_insert_contact), 'update_contact': str(mon_update_contact),\n 'user_id': application.user_id, 'name': application.user_name})\n\n\n<mask token>\n\n\ndef get_contact_from_mongo(number):\n if not number:\n return []\n query = TotalContact.objects(src_number=number, source__in=TotalContact\n .available()).order_by('source')\n lst = []\n for c in query:\n relation = TotalContact.relationship(c.source)\n if relation == -1:\n continue\n source = TotalContact.str_source(c.source)\n if not source:\n continue\n lst.append({'related_number': c.dest_number, 'source': source,\n 'is_calc': c.is_calc, 'total_count': c.total_count,\n 'total_duration': c.total_duration, 'relation': relation,\n 'name': c.dest_name})\n return lst\n\n\n<mask token>\n\n\ndef check_key_not_none(payload, keys):\n for key in keys:\n if payload.get(key) is None:\n logging.error('Missing args {}'.format(str(key)))\n return False\n return True\n\n\n@action(MessageAction.BILL_PAID)\ndef bill_paid(payload, msg_id):\n validate = check_key_not_none(payload, ['external_id', 'late_fee_part',\n 'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])\n if not validate:\n logging.error('payload key not fully pass in.')\n return\n external_id = payload['external_id']\n late_fee_part = Decimal(payload['late_fee_part'])\n principal_part = Decimal(payload['principal_part'])\n paid_at = payload['paid_at']\n partner_bill_id = payload['partner_bill_id']\n logging.debug(\n 'application %s paid principal part %s, paid late fee part %s',\n external_id, principal_part, late_fee_part)\n application = Application.filter(Application.external_id == external_id\n ).order_by(-Application.created_at).first()\n if not application:\n logging.info('application %s paid, not found application', external_id)\n return\n sub_bill_id = payload['bill_sub_id']\n overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==\n application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()\n if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not\n overdue_bill):\n logging.info('bill sub not in bomber %s', sub_bill_id)\n return\n with db.atomic():\n repay_at = str_no_utc_datetime(payload['latest_repay_at'])\n Application.update(repay_at=repay_at).where(Application.id ==\n application.id).execute()\n RepaymentLog.create(application=application.id, is_bombed=True,\n current_bomber=application.latest_bomber_id, cycle=application.\n cycle, principal_part=principal_part, late_fee_part=\n late_fee_part, repay_at=paid_at, ptp_bomber=application.\n ptp_bomber, latest_call=application.latest_call, periods=\n overdue_bill.periods if 
overdue_bill else None, overdue_bill_id=
            overdue_bill.id if overdue_bill else None,
            partner_bill_id=partner_bill_id)
        phone_status = PhoneStatus.CONNECTED.value
        real_relationship = RealRelationship.user_values()
        commit = CallActionCommit.NO.value
        # find the most recent connected, uncommitted call to a real contact
        # and promote that number's call priority
        number = CallActions.select(CallActions.number).where(CallActions.
            phone_status == phone_status, CallActions.real_relationship <<
            real_relationship, CallActions.commit == commit, CallActions.
            application == application.id).order_by(-CallActions.created_at
            ).first()
        if number:
            Contact.update(call_priority=PriorityStatus.REPAY.value).where(
                Contact.user_id == application.user_id, Contact.
                call_priority == PriorityStatus.LAST.value).execute()
            Contact.update(call_priority=PriorityStatus.LAST.value).where(
                Contact.user_id == application.user_id, Contact.number ==
                number.number).execute()
        if not application.latest_bomber_id:
            return
        Inbox.create(title='application %s,sub_bill_id %s repaid' % (
            application.external_id, sub_bill_id), content=
            'application %s,sub_bill_id %s repaid' % (application.
            external_id, sub_bill_id), receiver=application.
            latest_bomber_id or application.last_bomber_id, category=
            InboxCategory.REPAID.value)


@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
    """Deprecated."""
    bill = payload['head_bill']
    repay_at = str_no_utc_datetime(bill['latest_repay_at'])
    updated_row = Application.update(repay_at=repay_at).where(Application.
        id == bill['external_id']).execute()
    logging.info('application %s bill relief done', bill['external_id'])
    return updated_row


<mask token>


@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
    """
    Called by BOMBER_CALC_SUMMARY
    :param payload:
    :param msg_id:
    :return:
    """
    now = fn.NOW()
    origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
    overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
    query = Application.update(overdue_days=overdue_days).where(Application
        .status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.
        UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.
        overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)
    updated_rows_count = query.execute()
    logging.info('calc overdue days done, updated count: %s',
        updated_rows_count)
    try:
        calc_overdue_days_over_instalment()
    except Exception as e:
        logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))
    apps = Application.filter(Application.status << [ApplicationStatus.
        UNCLAIMED.value, ApplicationStatus.PROCESSING.value,
        ApplicationStatus.AB_TEST.value], Application.overdue_days > 95,
        Application.promised_date.is_null(True) | (fn.DATE(Application.
        promised_date) < datetime.today().date()))
    ids = [i.id for i in apps]
    for idx in range(0, len(ids), 100):
        send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
            'application_list': ids[idx:idx + 100]})
    send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})


def calc_overdue_days_over_instalment():
    now = fn.NOW()
    origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
    overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
    sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
        ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
    for status in sub_bill_status_list:
        query = OverdueBill.update(overdue_days=overdue_days).where(
            OverdueBill.status == status, OverdueBill.overdue_days > 95)
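        # Editor's sketch for clarity, not part of the original flow:
        # GREATEST(DATEDIFF(NOW(), origin_due_at), 0) above is the SQL-side
        # equivalent of this per-row Python computation (assuming
        # origin_due_at is a datetime):
        #
        #     days_overdue = max(
        #         (datetime.now().date() - origin_due_at.date()).days, 0)
        #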
updated_rows_count = query.execute()\n logging.info(\n 'calc_overdue_days_over_instalment done,count:%s,status:%s' % (\n updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.collection_id,\n OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on\n =OverdueBill.collection_id == Application.id).where(Application\n .status == status, Application.type == ApplicationType.\n CASH_LOAN_STAGING.value)\n app_update = {}\n for ob in overdue_bills:\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n<mask token>\n\n\ndef calc_overdue_days_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n today_now_time = datetime.now()\n month_first_day = today_now_time.replace(day=1, hour=1, minute=30,\n second=0, microsecond=0)\n for status in sub_bill_status_list:\n query = OverdueBill.update(overdue_days=overdue_days).where(\n OverdueBill.status == status, OverdueBill.overdue_days <= 95)\n updated_rows_count = query.execute()\n logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %\n (updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.\n created_at, OverdueBill.collection_id, OverdueBill.overdue_days\n ).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.\n collection_id == Application.id).where(Application.status ==\n status, Application.type == ApplicationType.CASH_LOAN_STAGING.value\n )\n app_update = {}\n for ob in overdue_bills:\n if (ob.status == ApplicationStatus.REPAID.value and ob.\n created_at < month_first_day):\n continue\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)\ndef automatic_escalation(payload, msg_id):\n app_ids = payload.get('application_list', [])\n if not app_ids:\n return\n apps = Application.select().where(Application.id.in_(app_ids), \n Application.status != ApplicationStatus.REPAID.value)\n for a in apps:\n new_cycle = application_entry_different_calculations(a)\n if a.overdue_days < 90:\n logging.info(\n 'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'\n .format(a.id, new_cycle, a.cycle, a.overdue_days))\n if new_cycle > a.cycle:\n with db.atomic():\n if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle\n .C1B.value):\n bomber_id = (a.latest_bomber_id if a.latest_bomber_id else\n a.cycle)\n DispatchAppHistory.update(out_at=datetime.now(),\n out_overdue_days=a.overdue_days).where(\n DispatchAppHistory.application == a.id, \n DispatchAppHistory.bomber_id == bomber_id).execute()\n Escalation.create(application=a.id, type=EscalationType.\n AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,\n 
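                    # (descriptive note) the Escalation row is an audit
                    # record: it keeps the outgoing bomber and both the old
                    # and new cycle so the move can be reviewed later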
current_cycle=a.cycle, escalate_to=new_cycle,
                    current_bomber_id=a.latest_bomber)
                dis_app_update = DispatchApp.update(status=DisAppStatus.
                    ABNORMAL.value).where(DispatchApp.application == a.id)
                dis_app_update.execute()
                a.cycle = new_cycle
                a.last_bomber = a.latest_bomber
                a.status = ApplicationStatus.UNCLAIMED.value
                a.latest_bomber = None
                a.ptp_bomber = None
                a.latest_call = None
                a.called_times = 0
                if new_cycle == Cycle.C1B.value:
                    a.C1B_entry = datetime.now()
                elif new_cycle == Cycle.C2.value:
                    a.C2_entry = datetime.now()
                elif new_cycle == Cycle.C3.value:
                    a.C3_entry = datetime.now()
                a.save()
    logging.info('automatic escalation done')


def application_entry_different_calculations(app):
    # map overdue-day ranges to collection cycles
    conf = {1: [1, 10], 2: [11, 30], 3: [31, 60], 4: [61, 90], 5:
        [91, 999999]}
    for new_cycle, scopes in conf.items():
        if scopes[0] <= app.overdue_days <= scopes[1]:
            return new_cycle
    return app.cycle


@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
    """Deprecated."""
    employees = Bomber.select(Bomber, Role).join(Role)
    summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,
        'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,
        'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':
        0, 'sms_sent': 0} for i in employees}
    now_date = date.today()
    cal_date = now_date - timedelta(days=1)
    claimed = Application.select(Application.latest_bomber, fn.COUNT(
        Application.id).alias('claimed')).where(fn.DATE(Application.
        claimed_at) == cal_date, Application.status << [ApplicationStatus.
        PROCESSING.value, ApplicationStatus.REPAID.value], Application.
        latest_bomber.is_null(False)).group_by(Application.latest_bomber)
    cleared = Application.select(Application.latest_bomber, fn.COUNT(
        Application.id).alias('cleared')).where(fn.DATE(Application.
        finished_at) == cal_date, Application.status == ApplicationStatus.
        REPAID.value, Application.latest_bomber.is_null(False)).group_by(
        Application.latest_bomber)
    completed = Application.select(Application.latest_bomber, fn.COUNT(
        Application.id).alias('completed')).where(Application.
        latest_bombing_time.is_null(False), fn.DATE(Application.
        latest_bombing_time) == cal_date, Application.latest_bomber.is_null
        (False)).group_by(Application.latest_bomber)
    escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(
        Escalation.id).alias('escalated')).where(fn.DATE(Escalation.
        created_at) == cal_date, Escalation.type == EscalationType.
        AUTOMATIC.value, Escalation.current_bomber.is_null(False),
        Escalation.status == ApprovalStatus.APPROVED.value).group_by(
        Escalation.current_bomber)
    transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).
        alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==
        cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(
        Transfer.operator)
    promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(
        BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.
        created_at) == cal_date, BombingHistory.result == BombingResult.
        HAS_PROGRESS.value).group_by(BombingHistory.bomber)
    amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
        SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
        RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
        RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,
        RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.
current_bomber)
    calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id
        ).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==
        cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)
    calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.
        record_id).alias('calls_connected')).where(fn.DATE(CallLog.
        time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==
        '1').group_by(CallLog.user_id)
    sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(
        ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_
        (ConnectType.sms()), ConnectHistory.created_at >= cal_date,
        ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)
    for i in claimed:
        summary[i.latest_bomber_id]['claimed'] += i.claimed
    for i in completed:
        summary[i.latest_bomber_id]['completed'] += i.completed
    for i in cleared:
        summary[i.latest_bomber_id]['cleared'] += i.cleared
    for i in escalated:
        summary[i.current_bomber_id]['escalated'] += i.escalated
    for i in transferred:
        summary[i.operator_id]['transferred'] += i.transferred
    for i in promised:
        summary[i.bomber_id]['promised'] += i.promised
    for i in amount_recovered:
        amount_recovered = i.principal_part + i.late_fee_part
        summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
    for i in calls_made:
        summary[int(i.user_id)]['calls_made'] += i.calls_made
    for i in calls_connected:
        summary[int(i.user_id)]['calls_connected'] += i.calls_connected
    for i in sms_sent:
        summary[i.operator_id]['sms_sent'] += i.sms_sent
    insert_args = []
    for bomber_id, data in summary.items():
        insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
            'claimed': data['claimed'], 'completed': data['completed'],
            'cleared': data['cleared'], 'escalated': data['escalated'],
            'transferred': data['transferred'], 'promised': data['promised'],
            'amount_recovered': data['amount_recovered'], 'calls_made':
            data['calls_made'], 'calls_connected': data['calls_connected'],
            'sms_sent': data['sms_sent'], 'date': cal_date})
    if insert_args:
        Summary.insert_many(insert_args).execute()
    cycle_args = []
    escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(
        Escalation.id).alias('escalated_in')).where(Escalation.status ==
        ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==
        cal_date).group_by(Escalation.escalate_to)
    for i in escalated_in:
        cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.
            escalated_in, 'date': cal_date})
    amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM
        (RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
        RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
        RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)
    for i in amount_recovered_total:
        amount_recovered_total = i.principal_part + i.late_fee_part
        cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':
            amount_recovered_total, 'date': cal_date})
    if cycle_args:
        Summary.insert_many(cycle_args).execute()
    logging.info('cal summary done')
    send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})


@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
    """Deprecated; the scheduled task still runs, exact status to be confirmed."""
    cal_date = date.today() - timedelta(days=1)
    employees = Bomber.select(Bomber, Role).join(Role)
    auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,
        AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')
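        # (descriptive note) tallies yesterday's auto-call outcomes per
        # bomber and result; the result codes are split into PTP /
        # follow-up / not-useful in the loop below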
).where(fn.DATE(AutoCallActions.created_at) == cal_date)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.\n is_null(False), RepaymentLog.is_bombed == True)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False))\n auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,\n AutoCallActions.result)\n amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)\n cleared = cleared.group_by(Application.latest_bomber)\n summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,\n 'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered': \n 0} for e in employees}\n for a in auto_call_actions:\n summary[a.bomber_id]['answered_calls'] += a.count\n if a.result == AutoCallResult.PTP.value:\n summary[a.bomber_id]['ptp'] += a.count\n if a.result == AutoCallResult.FOLLOW_UP.value:\n summary[a.bomber_id]['follow_up'] += a.count\n if a.result == AutoCallResult.NOT_USEFUL.value:\n summary[a.bomber_id]['not_useful'] += a.count\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'answered_calls': data['answered_calls'], 'ptp': data['ptp'],\n 'follow_up': data['follow_up'], 'not_useful': data['not_useful'\n ], 'cleared': data['cleared'], 'amount_recovered': str(data[\n 'amount_recovered']), 'date': cal_date})\n if insert_args:\n Summary2.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_DISCOUNT_APPROVED)\ndef bomber_discount_approved(payload, msg_id):\n app_id = payload['id']\n msg_type = payload['msg_type']\n discount_to = payload['discount_to']\n effective_to = payload['effective_to']\n application = Application.filter(Application.id == app_id).first()\n if not application:\n logging.error(\n 'discount approved msg send failed application %s not found',\n app_id)\n return\n template = Template.select(Template.text, Template.app).where(Template.\n type == ConnectType.AUTO_SMS.value, Template.id << Template.\n get_auto_sms_tpl(msg_type), Template.app == application.app).first()\n if not template:\n logging.error('discount approved msg send failed template %s not found'\n , msg_type)\n return\n promised_date = None\n if application.promised_date:\n promised_date = application.promised_date.strftime('%d-%m-%Y')\n tpl_data = {'user_name': application.user_name, 'due_days': application\n .overdue_days, 'app_name': application.app, 'phone': application.\n user_mobile_no, 'cs_number': cs_number_conf.get(application.app,\n '02150202889'), 'promised_date': promised_date, 'discount_to':\n discount_to, 'effective_to': effective_to}\n content = template.text.format(**tpl_data)\n data_list = [{'receiver': '62' + application.user_mobile_no, 'content':\n content, 'title': ''}]\n send_sms(data_list, msg_type, application.app)\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST)\ndef 
bomber_auto_call_list(payload, msg_id):\n with db.atomic():\n bomber_dispatch_app()\n dispatch_instalment_app()\n dis_apps = DispatchApp.select(DispatchApp.application).where(\n DispatchApp.status == DisAppStatus.NORMAL.value)\n c1_apps = Application.select(Application.id, Application.cycle,\n Application.follow_up_date, Application.called_times).where(Application\n .status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.\n AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.\n is_rejected == False, Application.promised_date.is_null(True) | (fn\n .DATE(Application.promised_date) < datetime.today().date())).order_by(\n Application.overdue_days, Application.apply_at)\n dis_apps_ids = [da.application_id for da in dis_apps]\n insert_args = []\n for a in c1_apps:\n if a.id in dis_apps_ids:\n continue\n insert_args.append({'application': a.id, 'cycle': a.cycle,\n 'follow_up_date': a.follow_up_date, 'called_times': 1 if a.\n called_times else 0, 'description': 'init'})\n if not insert_args:\n logging.error('no application need auto call')\n with db.atomic():\n AutoCallList.delete().execute()\n for idx in range(0, len(insert_args), 100):\n AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()\n for idx in range(0, len(insert_args), 100):\n application_list = [i['application'] for i in insert_args[idx:idx +\n 100]]\n send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {\n 'application_list': application_list})\n logging.info('bomber generate auto call list finished')\n send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})\n\n\nclass ChangeBomberTool(object):\n\n @staticmethod\n def in_record(bomber_id, ids, bd):\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.\n alias('application_id'), R(str(bomber_id)).alias('bomber_id'),\n fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(\n 'expected_out_time'), Application.overdue_days.alias(\n 'entry_overdue_days')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << ids)\n Application.update(latest_bomber=bomber_id).where(Application.id.\n in_(ids)).execute()\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bd),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n @staticmethod\n def out_record(a, bd):\n _id = str(a.id)\n DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a\n .overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]\n .get('principal_paid')), out_late_fee_pending=bd[_id].get(\n 'late_fee') - bd[_id].get('late_fee_paid')).where(\n DispatchAppHistory.application == a.id, DispatchAppHistory.\n bomber_id == a.latest_bomber_id).execute()\n a.last_bomber = a.latest_bomber\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n a.save()\n\n @staticmethod\n def classify(l, b):\n if len(l) == 1:\n return l[0]\n _l = filter(lambda x: x['bomber'] != b, l)\n return min(_l, key=lambda x: len(x['ids']))\n\n\n<mask token>\n\n\ndef bomber_dispatch_app():\n try:\n c1a_dispatch_app()\n except Exception as e:\n logging.error('c1a_dispatch_app error:%s' % str(e))\n cycle = {(1): 10, (2): 30, (3): 60, (4): 90}\n apps = Application.select().where(fn.DATE(Application.C2_entry) == date\n .today(), Application.type == 
ApplicationType.CASH_LOAN.value)\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C2.value)\n apps_ids = [a.id for a in apps]\n dispatch_inserts = []\n start_index = 0\n apps_length = len(apps_ids)\n logging.warning('apps length %s' % str(apps_length))\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)\n gen = CycleIter([b.id for b in bombers])\n existing_list = []\n end_index = start_index + int(apps_length * p.app_percentage)\n logging.info('partner length %s' % str(end_index))\n if not apps_ids[start_index:end_index]:\n continue\n bills = BillService().bill_list(application_ids=apps_ids[\n start_index:end_index])\n bill_dict = {bill['application_id']: bill for bill in bills}\n for a_id in apps_ids[start_index:end_index]:\n bomber = average_gen(gen, existing_list)\n q = DispatchApp.delete().where(DispatchApp.application == a_id\n ).execute()\n dispatch_inserts.append({'application': a_id, 'bomber': bomber,\n 'partner': p.id})\n application = Application.select().where(Application.id == a_id\n ).first()\n application.latest_bomber = bomber\n application.status = ApplicationStatus.AB_TEST.value\n application.ptp_bomber = None\n application.save()\n day_next_cycle = cycle.get(application.cycle\n ) - application.overdue_days\n DispatchAppHistory.create(application=a_id, partner_id=p.id,\n bomber_id=bomber, entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - Decimal(\n bill_dict[a_id].get('principal_paid')),\n entry_late_fee_pending=Decimal(bill_dict[a_id].get(\n 'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')\n ), expected_out_time=date.today() + timedelta(days=\n day_next_cycle))\n start_index = end_index\n with db.atomic():\n for idx in range(0, len(dispatch_inserts), 100):\n DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n config = SystemConfig.prefetch(SCI.AB_TEST_C2)\n c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)\n c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)\n c2 = apps_ids[start_index:]\n if c2:\n bills = BillService().bill_list(application_ids=c2)\n else:\n bills = []\n bill_dict = {bill['application_id']: bill for bill in bills}\n logging.info('c2 AB_test length: %s' % str(c2))\n gen = CycleIter(c2_bomber)\n existing_list = []\n for c in c2:\n bomber = average_gen(gen, existing_list)\n application = Application.filter(Application.id == c).first()\n application.status = ApplicationStatus.AB_TEST.value\n application.latest_bomber = bomber\n application.ptp_bomber = None\n application.save()\n day_next_cycle = 46 - application.overdue_days\n DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at\n =datetime.now(), entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - bill_dict[c].get(\n 'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(\n 'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),\n expected_out_time=date.today() + timedelta(days=day_next_cycle))\n ab_test_other()\n\n\ndef c1a_dispatch_app():\n today = datetime.today().date()\n tomorrow = today + timedelta(days=1)\n c1a_apps = Application.select().where(Application.status << [\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value], Application.dpd1_entry >= today, Application.dpd1_entry <\n tomorrow, Application.type == ApplicationType.CASH_LOAN.value)\n 
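    # Editor's note: the block below splits yesterday's new DPD1 cash-loan
    # applications across partners by Partner.app_percentage, then evenly
    # across each partner's bombers. A minimal sketch of the even split,
    # assuming get_average_number(total, n) returns per-bomber quotas:
    #
    #     total, n = 10, 3
    #     base, extra = divmod(total, n)          # -> 3, 1
    #     quotas = [base + (1 if i < extra else 0) for i in range(n)]
    #     assert sum(quotas) == total             # [4, 3, 3]
    #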
    all_aids = [a.id for a in c1a_apps]
    partners = Partner.select().where(Partner.status == PartnerStatus.
        NORMAL.value, Partner.cycle == Cycle.C1A.value)
    end = 0
    for p in partners:
        bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
            is_del == 0)
        start = end
        end += int(len(all_aids) * p.app_percentage)
        aids = all_aids[start:end]
        bids = [b.id for b in bombers]
        if not bids or not aids:
            continue
        average_number = get_average_number(len(aids), len(bids))
        p_end = 0
        for i, bid in enumerate(bids):
            p_start = p_end
            p_end += average_number[i]
            b_aids = aids[p_start:p_end]
            with db.atomic():
                q = Application.update(latest_bomber=bid, status=
                    ApplicationStatus.AB_TEST.value).where(Application.id <<
                    b_aids).execute()
                params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,
                    'application_ids': b_aids, 'dest_bomber_id': bid}
                new_in_record(**params)
            try:
                dispatch_inserts = []
                for aid in b_aids:
                    dispatch_inserts.append({'application': aid, 'bomber':
                        bid, 'partner': p.id, 'status': DisAppStatus.NORMAL
                        .value})
                if dispatch_inserts:
                    q = DispatchApp.insert_many(dispatch_inserts).execute()
            except Exception as e:
                logging.error(
                    'error writing C1A dispatched cases into dispatch_app:%s'
                    % str(e))


<mask token>


def get_cash_bomber(bids, cycle):
    cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==
        0, Bomber.instalment != cycle)
    cash_bids = [b.id for b in cash_bombers]
    return cash_bids


<mask token>


def out_and_in_record_instalment(**kwargs):
    if not kwargs.get('application_ids'):
        return
    # close any open dispatch records before opening new ones
    out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(
        DispatchAppHistory.application << kwargs['application_ids'],
        DispatchAppHistory.out_at.is_null(True)).execute()
    cycle_period = {1: '10', 2: '30', 3: '60', 4: '90'}
    period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
    kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
    subquery = Application.select(Application.amount, fn.NOW().alias(
        'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
        'application_id'), R(str(kwargs['dest_bomber_id'])).alias(
        'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.
        alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).
        alias('partner_id'), SQL(
        'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
        .alias('expected_out_time')).where(Application.status !=
        ApplicationStatus.REPAID.value, Application.id << kwargs[
        'application_ids'])
    application_list = list(subquery)
    for idx in range(0, len(application_list), 50):
        applications = application_list[idx:idx + 50]
        app_ids = [i.application_id for i in applications]
        overdue_bills = OverdueBill.select().where(OverdueBill.
            collection_id << app_ids)
        sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
        bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
        insert_args = lambad_instalment_result(bill_list, applications)
        if not insert_args:
            continue
        DispatchAppHistory.insert_many(insert_args).execute()


<mask token>


def c1b_dispatch_in_record(**kwargs):
    app_ids = kwargs.get('apps')
    partner_id = kwargs.get('partner_id', 'null')
    bill_dict = kwargs.get('bill_dict')
    period = kwargs.get('period')
    bomber_id = kwargs.get('bomber_id')
    if not all([app_ids, partner_id, bill_dict, period]):
        return False
    bill_dict = {str(k): v for k, v in bill_dict.items()}
    subquery = Application.select(Application.amount, fn.NOW().alias(
        'created_at'),
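        # (descriptive note) R() and SQL() splice raw SQL fragments into the
        # peewee SELECT so its rows can be bulk-inserted as
        # DispatchAppHistory records; expected_out_time is the entry date
        # plus whatever remains of the cycle period after overdue_days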
fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().\n alias('entry_at'), Application.overdue_days.alias(\n 'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.id << app_ids)\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef check_call_history(application):\n app_create_at = application.created_at + timedelta(days=4)\n if datetime.today().date() > app_create_at.date():\n call_actions = CallActions.select().where(CallActions.type == 0, \n CallActions.application == application.id, CallActions.\n created_at > datetime.now() - timedelta(days=5))\n for call in call_actions:\n if call.phone_status == PhoneStatus.CONNECTED.value:\n return True\n return False\n return True\n\n\n@action(MessageAction.BOMBER_SCAVENGER)\ndef scavenger(payload, msg_id):\n scavenger_time = -60\n scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'SCAVENGER_TIME').first()\n if scavenger and scavenger.value.isdigit():\n scavenger_time = -int(scavenger.value)\n update_auto_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value, description='scavenger').where(AutoCallList.status ==\n AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime\n .now() + timedelta(minutes=scavenger_time))\n count = update_auto_call_list.execute()\n logging.info('scavenger processed %s application', count)\n mail_box_scavenger_time = -30\n mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'MAIL_BOX_SCAVENGER_TIME').first()\n if mail_box_scavenger and mail_box_scavenger.value.isdigit():\n mail_box_scavenger_time = -int(mail_box_scavenger.value)\n update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.\n value, AutoCallList.updated_at < datetime.now() + timedelta(minutes\n =mail_box_scavenger_time))\n mail_box_count = update_mail_box_call_list.execute()\n logging.info('scavenger update mail box %s', mail_box_count)\n update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value\n ).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.\n updated_at < datetime.now() + timedelta(minutes=-30))\n ivr_result = update_auto_ivr.execute()\n logging.info('scavenger update %s ivr' % ivr_result)\n\n\n<mask token>\n\n\n@action(MessageAction.REPORT_BOMBER_COLLECTION)\ndef report_bomber_collection(payload, msg_id):\n start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)\n ).scalar()\n now = datetime.now()\n if start_date and str(start_date) == str(now)[:10]:\n return\n end_date = str(now + timedelta(days=1))[:10]\n start_date = str(now)[:10]\n dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))\n all_overdue_loan_sql1 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n GROUP BY 1;\n \"\"\"\n s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()\n d1 = OperatedDict(s_data1)\n all_overdue_loan_sql2 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE DATE(ba.follow_up_date) > CURDATE()\n AND ba.called_counts = 0\n GROUP BY 1;\n \"\"\"\n s_data2 = 
readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()\n d2 = OperatedDict(s_data2)\n overdue_loans_entered_into_predict_call_system_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE ba.called_counts >= 1\n GROUP BY 1;\n \"\"\"\n s_data3 = readonly_db.execute_sql(\n overdue_loans_entered_into_predict_call_system_sql).fetchall()\n d3 = OperatedDict(s_data3)\n loans_completed_sql = \"\"\"\n SELECT ba.cycle, COUNT(DISTINCT ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()\n d4 = OperatedDict(s_data4)\n connected_calls_automatic_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()\n d5 = OperatedDict(s_data5)\n connected_calls_manual_sql = \"\"\"\n SELECT bb.cycle, COUNT(bb.id)\n FROM bomber.bombing_history bb\n WHERE DATE(bb.created_at) = curdate()\n AND (bb.bomber_id < 150 OR bb.bomber_id > 200)\n GROUP BY bb.cycle;\n \"\"\"\n s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()\n d6 = OperatedDict(s_data6)\n logging.info('Directly get data from database successfully.')\n c1 = d1 - d2\n c2 = d3\n c3 = c2 / c1\n c4 = d4\n c5 = c4 / c2\n c6 = d5\n c7 = c6 / c4\n c8 = d6\n c9 = OperatedDict(get_agent())\n c10 = (c6 + c8) / c9\n try:\n c11 = average_call_duration_team(start_date, end_date)\n except AttributeError:\n c11 = {}\n lst = []\n for i in range(1, 5):\n lst.append({'apply_date': start_date, 'cycle': dct[i],\n 'all_overdue_loan': c1.get(i, 0),\n 'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),\n 'of_overdue_loans_entered_into_predict_call_system': round(c3.\n get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),\n 'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *\n 100, 1), 'connected_calls_automatic': c6.get(i, 0),\n 'connected_calls_automatic_completed_loans': round(c7.get(i, 0),\n 1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i, \n 0), 'average_calls_agent': round(c10.get(i, 0), 1),\n 'average_call_duration_team': round(c11.get(i, 0), 1)})\n ReportCollection.insert_many(lst).execute()\n logging.info('report_bomber_collection:Done!')\n\n\n<mask token>\n\n\ndef new_out_record(**kwargs):\n if not kwargs['application_ids']:\n return\n DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.\n bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.\n application << kwargs['application_ids'], DispatchAppHistory.out_at\n .is_null(True)).execute()\n if kwargs.get('month_dispatch'):\n return\n try:\n Application.update(ptp_bomber=None).where(Application.id << kwargs[\n 'application_ids']).execute()\n except Exception as e:\n logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[\n 'application_ids'], str(e)))\n\n\n<mask token>\n\n\ndef end_old_application(old_app, paid=False):\n if paid:\n if old_app.status == OldLoanStatus.WAITING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return\n if old_app.status == OldLoanStatus.PROCESSING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return old_app.application_id\n end_date = old_app.end_date\n now = datetime.now()\n if now >= max(end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n old_app.save()\n return 
old_app.application_id\n\n\n<mask token>\n\n\ndef start_old_application(old_app, cancel=False):\n application_id = old_app.application_id\n if cancel and old_app.status == OldLoanStatus.PAID.value:\n now = datetime.now()\n if old_app.start_date is None:\n old_app.status = OldLoanStatus.WAITING.value\n elif now >= max(old_app.end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.\n promised_date or now)).where(DispatchAppHistory.bomber_id ==\n old_app.bomber_id, DispatchAppHistory.application ==\n application_id).execute()\n else:\n old_app.status = OldLoanStatus.PROCESSING.value\n DispatchAppHistory.update(out_at=None).where(DispatchAppHistory\n .bomber_id == old_app.bomber_id, DispatchAppHistory.\n application == application_id).execute()\n old_app.save()\n return\n application = Application.get_or_none(Application.id == application_id,\n Application.status != ApplicationStatus.REPAID.value, Application.\n overdue_days > 90, Application.promised_date.is_null(True) | (fn.\n DATE(Application.promised_date) < datetime.today().date()))\n if not application:\n logging.error('Can not set old application %s to start collecting',\n application_id)\n return\n if old_app.status in OldLoanStatus.no_available():\n logging.info('%s has finished or paid', old_app.application_id)\n return\n config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)\n sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)\n old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value\n old_app.status = OldLoanStatus.PROCESSING.value\n old_app.bomber_id = old_app_bomber\n old_app.start_date = datetime.now()\n if not old_app.end_date:\n old_app.end_date = datetime.now() + timedelta(days=sp)\n old_app.save()\n in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,\n application_ids=[old_app.application_id], expected_out_time=str(\n old_app.end_date))\n\n\n<mask token>\n\n\ndef run_member_sql(sql):\n result = [0, 0]\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n sql_result = cursor.fetchone()\n if sql_result:\n result = sql_result\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n return result\n\n\n<mask token>\n\n\ndef get_before_bomber(date_time):\n begin_time = str(date_time - timedelta(days=7))\n end_time = str(date_time)\n old_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at<date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (begin_time, begin_time))\n old_data = run_one_sql(old_sql)\n new_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at> '%s' \n and created_at<'%s'\n and overdue_days=1;\n \"\"\"\n % (begin_time, end_time))\n new_data = run_one_sql(new_sql)\n dpd4_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>date_add('%s',interval 1 day) \n and created_at< date_add('%s',interval 1 day)\n and overdue_days=4;\n \"\"\"\n % (begin_time, end_time))\n dpd4_data = run_one_sql(dpd4_sql)\n dpd2_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at< date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % 
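    # (descriptive note) recovery for the window is computed as:
    #   all_money = old DPD2-3 stock + new DPD1 inflow
    #   repayment = all_money - amount that rolled to DPD4
    #                         - amount still sitting at DPD2-3
    #   proportion = repayment / all_money * 100 (see the assignments below)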
(end_time, end_time))\n dpd2_data = run_one_sql(dpd2_sql)\n all_money = old_data + new_data\n repayment = all_money - dpd4_data - dpd2_data\n pro = 0\n if all_money:\n pro = repayment / all_money * 100\n RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,\n proportion=pro, repayment=repayment)\n\n\n<mask token>\n\n\ndef get_c1a_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=19)\n repayment_sql = (\n \"\"\"\n select \n sum(b.principal_part+b.late_fee_part) as paid_amount, \n cdt\n from \n (select \n br.principal_part, br.late_fee_part, \n date(cdt) as cdt, br.repay_at, br.application_id\n from (\n select ba.id, ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n if not repayment:\n return\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n for d in repayment:\n repay = d[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n d[1], RepaymentReportInto.cycle == Cycle.C1A.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = round(repay, 3)\n pro = repay / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_c2_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n not_contain_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and 
ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_money = run_one_sql(not_contain_sql)\n begin_date = date_time - timedelta(days=37)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n not_contain_repay_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_repay = run_all_sql(not_contain_repay_sql)\n if not not_contain_money and repayment:\n return\n for i in ContainOut.values():\n if i == ContainOut.NOT_CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(\n not_contain_money, 3), proportion='0', repayment=0,\n is_first_loan=is_first_loan, contain_out=ContainOut.\n NOT_CONTAIN.value)\n for repay in not_contain_repay:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n elif i == ContainOut.CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(all_money, 3),\n proportion='0', repayment=0, is_first_loan=\n is_first_loan, contain_out=ContainOut.CONTAIN.value)\n for repay in repayment:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\ndef 
get_c3_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id, ba.c3_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=30)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c3_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c3_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = '%s'\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 4\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C3.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n if not repayment:\n return\n for repay in repayment:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n repay[1], RepaymentReportInto.cycle == Cycle.C3.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = repay_money\n pro = 0\n if report.all_money and int(report.all_money):\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_static_bomber(begin_date):\n active_date = begin_date - timedelta(days=8)\n bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),\n BomberR.last_active_at.alias('active')).where(BomberR.\n last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])\n summary = []\n for bomber in bombers:\n summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':\n bomber.role.cycle, 'work_ind': 0})\n SummaryBomber.insert_many(summary).execute()\n\n\n<mask token>\n\n\n@time_logger\ndef get_call_and_made(end_date, begin_date, real_time_query=False):\n call_sql = (\n \"\"\"\n select \n bomber_id, \n count(case when relationship is not null then application_id end) \n as 'call_cnt', \n count(distinct case when relationship is not null then \n application_id end) as 'call_case',\n count(case when phone_status=4 then application_id end) as 'connect',\n count(distinct case when phone_status=4 then application_id end) \n as 'connect_case'\n from (\n select bomber_id,application_id,phone_status, cycle, relationship\n from bomber.call_actions ba\n where created_at>'%s' and created_at<'%s'\n and type in (0, 1)\n ) a\n group by 1\n \"\"\"\n % (begin_date, end_date))\n calls = run_all_sql(call_sql)\n if real_time_query:\n return calls\n for call in calls:\n bomber, call_cnt, case_made, 
connect_cnt, case_connect = call\n SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,\n call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(\n SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date\n ).execute()\n return calls\n\n\n@time_logger\ndef get_claimed_cnt(end_date, begin_date, real_time_query=False):\n table_date = begin_date - timedelta(days=30)\n claimed_sql = (\n \"\"\"\n SELECT\n COUNT( `t1`.`application_id` ) AS cnt,\n `t1`.`bomber_id` AS bomber_id \n FROM\n `dispatch_app_history` AS t1 \n WHERE\n ( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null ) \n AND ( `t1`.`bomber_id` != 1000 ) \n AND ( `t1`.`partner_id` IS null ) \n AND ( `t1`.`entry_at` > '%s' ) \n AND ( `t1`.`entry_at` < '%s' ) \n GROUP BY\n `t1`.`bomber_id`\n \"\"\"\n % (begin_date, table_date, end_date))\n claimeds = run_all_sql(claimed_sql)\n if real_time_query:\n return claimeds\n for claimed in claimeds:\n cnt, bomber_id = claimed\n SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == bomber_id).execute()\n return claimeds\n\n\ndef get_sms_data(end_data, begin_data):\n all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(\n 'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')\n ).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.\n created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())\n ).group_by(ConnectHistoryR.operator)\n for sms in all_sms:\n SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==\n begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()\n return all_sms\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_NEW)\ndef summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_active_bomber(begin_date)\n get_call_and_made(end_date, begin_date)\n get_claimed_cnt(end_date, begin_date)\n get_sms_data(end_date, begin_date)\n get_ptp_data(end_date, begin_date)\n get_recover_amount(end_date, begin_date)\n get_unfollowed(begin_date)\n get_unfollowed_call(begin_date)\n\n\ndef get_new_case_amount(begin_date, end_date):\n all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.\n entry_late_fee_pending + DispatchAppHistoryR.\n entry_principal_pending).alias('pending'), DispatchAppHistoryR.\n bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')\n ).where(DispatchAppHistoryR.entry_at > begin_date, \n DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.\n partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)\n for case in all_case:\n SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt\n =case.cnt).where(SummaryBomber.bomber_id == case.bomber_id, \n SummaryBomber.time == begin_date).execute()\n return all_case\n\n\n<mask token>\n\n\ndef get_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n 
ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c2_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c2_entry > '%s' \n AND ba.c2_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c3_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c3_entry > '%s' \n AND ba.c3_entry < '%s' \n ) a\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date, end_date))\n case_cleared_sums = run_all_sql(sql)\n for clear in case_cleared_sums:\n SummaryBomber.update(new_case_cleared_sum=clear[1]).where(\n SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==\n clear[0]).execute()\n\n\n@time_logger\ndef get_new_case_call(begin_date, end_date, real_query_time=False):\n sql = (\n \"\"\"\n SELECT\n bd.bomber_id,\n count( DISTINCT bd.application_id )\n FROM\n bomber.dispatch_app_history bd\n INNER JOIN bomber.call_actions bc \n ON bd.application_id = bc.application_id \n AND bd.bomber_id = bc.bomber_id \n AND date( bd.entry_at ) = date( bc.created_at ) \n WHERE\n entry_at > '%s' \n AND entry_at < '%s' \n AND partner_id IS NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n new_case_calls = run_all_sql(sql)\n if real_query_time:\n return new_case_calls\n for call in new_case_calls:\n SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber\n .bomber_id == call[0], SummaryBomber.time == begin_date).execute()\n return new_case_calls\n\n\n@time_logger\ndef get_calltime_avg(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction,\n count( 1 ) AS auto_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' ' \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n AND an.recording is not null\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]][0] += data[1]\n result[data[0]][1] += data[2]\n continue\n result[data[0]] = [data[1], data[2]]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=\n value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == key).execute()\n return result\n\n\ndef get_no_calltime_avg(begin_date, end_date):\n manual_sql = (\n \"\"\"\n 
SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND (an.status!='ANSWERED' or an.recording is null) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n for data in manuals:\n SummaryBomber.update(calltime_no_case_sum=data[1],\n calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /\n data[2] if data[2] else 0).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == data[0]).execute()\n\n\n@time_logger\ndef get_calltime_sum(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' '\n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == key).execute()\n return result\n\n\n<mask token>\n\n\n@action(MessageAction.UPDATE_SUMMARY_NEW)\ndef update_summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'UPDATE_SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_new_case_amount(begin_date, end_date)\n get_kp_cleared(begin_date, end_date)\n get_kp_today(begin_date, end_date)\n get_ptp_cnt(begin_date, end_date)\n get_ptp_call_cnt(begin_date, end_date)\n get_new_case_cleared(begin_date, end_date)\n get_new_case_call(begin_date, end_date)\n get_calltime_avg(begin_date, end_date)\n get_no_calltime_avg(begin_date, end_date)\n get_calltime_sum(begin_date, end_date)\n\n\ndef get_cycle_claimed(begin_date, end_date):\n sql = (\n \"\"\"\n select cycle,count(1)\n from bomber.application where cycle in (1,2,3,4)\n and (finished_at is null or (finished_at>'%s'))\n and created_at>'2018-09-01'\n group by 1\n \"\"\"\n % begin_date)\n result = run_all_sql(sql)\n return result\n\n\n@time_logger\ndef cycle_new_case(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( ba.id ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.created_at ) = bo.which_day_overdue \n WHERE\n ba.created_at > '%s' \n AND 
ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c1b_entry ) = bo.which_day_overdue\n WHERE\n c1b_entry > '%s' \n AND c1b_entry < '%s' \n UNION\n SELECT\n 3 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c2_entry ) = bo.which_day_overdue \n WHERE\n c2_entry > '%s' \n AND c2_entry < '%s' \n UNION\n SELECT\n 4 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c3_entry ) = bo.which_day_overdue\n WHERE\n c3_entry > '%s' \n AND c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n all_datas = run_all_sql(sql)\n if real_time_query:\n return all_datas\n for data in all_datas:\n SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()\n return all_datas\n\n\n@time_logger\ndef get_cycle_new_case_call(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.created_at ) = date( bc.created_at ) \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c1b_entry ) = date( bc.created_at ) \n WHERE\n ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s'\n UNION\n SELECT\n 3 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c2_entry ) = date( bc.created_at ) \n WHERE\n ba.c2_entry > '%s' \n AND ba.c2_entry < '%s'\n UNION\n SELECT\n 4 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c3_entry ) = date( bc.created_at ) \n WHERE\n ba.c3_entry > '%s' \n AND ba.c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n cycle_datas = run_all_sql(sql)\n if real_time_query:\n return cycle_datas\n for data in cycle_datas:\n SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber\n .time == begin_date, SummaryBomber.cycle == data[0], \n SummaryBomber.bomber_id == data[0]).execute()\n return cycle_datas\n\n\ndef get_cycle_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n '1' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.created_at ) = date( br.repay_at ) \n WHERE ba.created_at > '%s' \n AND ba.created_at < '%s' \n GROUP BY 1, 2 ) a \n UNION\n SELECT \n '2' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n 
(SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n GROUP BY 1, 2) a\n \"\"\"\n % (begin_date, end_date, begin_date, end_date))\n cycle_cleared = run_all_sql(sql)\n for i in cycle_cleared:\n SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber\n .cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.\n time == begin_date).execute()\n\n\n<mask token>\n\n\ndef get_change_bomber():\n cycle_role_map = {(5): Cycle.C1B.value, (6): Cycle.C2.value, (8): Cycle\n .C3.value}\n result = {}\n bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,\n BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=\n BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.\n created_at) == date.today(), BomberLog.role_id << list(\n cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.\n instalment == 0).dicts()\n for b_log in bomber_logs:\n cycle = cycle_role_map.get(b_log['role_id'])\n group_id = b_log['group_id']\n if cycle in result:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}\n else:\n result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}}\n if b_log['operation'] == 0:\n result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])\n if result:\n bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map\n .keys())), Bomber.is_del == 0, Bomber.instalment == 0)\n for b in bombers:\n cycle_result = result.get(cycle_role_map[b.role_id], {})\n role_result = cycle_result.get(b.group_id)\n if not role_result:\n continue\n role_result['new_ids'].append(b.id)\n resutl_list = []\n for cycle, group_dict in result.items():\n resutl_list.extend(list(group_dict.values()))\n return resutl_list\n return []\n\n\n<mask token>\n\n\ndef get_surplus_application(new_ids, del_ids, average_nums, classified_apps):\n surplus_apps = []\n for del_id in del_ids:\n del_res = classified_apps.get(del_id, {})\n p_list = del_res.get('p_list', [])\n np_list = del_res.get('np_list', [])\n del_res['need_num'] = -(len(p_list) + len(np_list))\n del_res['to_list'] = np_list + p_list\n surplus_apps.extend(p_list)\n surplus_apps.extend(np_list)\n for index, bid in enumerate(new_ids):\n average = average_nums[index]\n bomber_app = classified_apps.get(bid)\n if not bomber_app:\n bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid\n ).first()\n bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':\n [], 'np_num': 0, 'need_num': average, 'partner_id': bomber.\n partner_id if bomber else ''}\n classified_apps[bid] = bomber_app\n else:\n p_num = len(bomber_app['p_list'])\n np_num = len(bomber_app['np_list'])\n if p_num > average:\n bomber_app['need_num'] = -np_num\n else:\n bomber_app['need_num'] = average - (p_num + np_num)\n bomber_app['p_num'] = p_num\n bomber_app['np_num'] = np_num\n if bomber_app['need_num'] < 0:\n random.shuffle(bomber_app['np_list'])\n res_over = bomber_app['np_list'][:-bomber_app['need_num']]\n bomber_app['to_list'] = res_over\n surplus_apps.extend(res_over)\n classified_apps_list = sorted(classified_apps.values(), key=lambda x: x\n ['need_num'], reverse=True)\n return surplus_apps, classified_apps_list\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)\ndef bomber_dispatch_applications(payload, 
msg_id):\n change_bombers = get_change_bomber()\n instalment_change_bombers = get_instalment_change_bomber()\n params = {ApplicationType.CASH_LOAN.value: change_bombers,\n ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}\n for type, bombers in params.items():\n change_bomber_dispatch_apps(change_bombers=bombers, type=type)\n\n\n<mask token>\n\n\ndef get_summary_daily_time():\n mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')\n mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')\n now_date = datetime.now()\n now_date_time = now_date.time()\n today_str = str(now_date.date())\n if now_date_time < mid_time_t1.time():\n yes_date = now_date - timedelta(days=1)\n yes_date_str = str(yes_date.date())\n begin_str = yes_date_str + ' 17:20:00'\n end_str = today_str + ' 00:00:00'\n elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():\n begin_str = today_str + ' 00:00:00'\n end_str = today_str + ' 12:40:00'\n else:\n begin_str = today_str + ' 12:40:00'\n end_str = today_str + ' 17:20:00'\n begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')\n end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')\n summary_datetime = now_date - timedelta(minutes=30)\n summary_date = summary_datetime.date()\n return begin_time, end_time, summary_date\n\n\n@action(MessageAction.SUMMARY_DAILY)\ndef summary_daily_data(payload, msg_id):\n begin_time, end_time, summary_date = get_summary_daily_time()\n call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.\n bomber_id, CallActionsR.application_id, CallActionsR.promised_date,\n CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(\n CallActionsR.created_at >= begin_time, CallActionsR.created_at <\n end_time, CallActionsR.type << (0, 1))\n summary_dailys = {}\n for call in call_actions:\n if call.bomber_id not in summary_dailys:\n summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.\n bomber_id, 'summary_date': str(summary_date)}\n if call.name and call.number:\n summary_dailys[call.bomber_id]['call_cnt'] += 1\n if call.promised_date:\n summary_dailys[call.bomber_id]['ptp_cnt'] += 1\n C1_sql = (\n \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,a.cycle\n from \n (select a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,2,4,5)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\"\n % (begin_time, end_time))\n C1_repayment = run_all_sql(C1_sql)\n other_sql = (\n \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,cycle\n from (\n select application_id,current_bomber_id,pay_amount,repay_at,cycle\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at,\n br.cycle\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (3,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n 
group by 1,4) b\n group by 1\n \"\"\"\n % (begin_time, end_time))\n other_repayment = run_all_sql(other_sql)\n all_repayment = C1_repayment + other_repayment\n for res in all_repayment:\n bomber_id, pay_amount, cycle = res\n if bomber_id in summary_dailys:\n summary_dailys[bomber_id]['repayment'] += pay_amount\n else:\n summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': cycle, 'repayment': pay_amount, 'bomber_id':\n bomber_id, 'summary_date': str(summary_date)}\n insert_values = list(summary_dailys.values())\n if insert_values:\n SummaryDaily.insert_many(insert_values).execute()\n\n\n<mask token>\n\n\ndef get_app_logs(apps):\n app_logs = {}\n all_np_apps = []\n all_p_apps = []\n for a in apps:\n latest_bomber = a['latest_bomber']\n latest_bomber = a['cycle'] if not latest_bomber else latest_bomber\n if latest_bomber in app_logs:\n app_logs[latest_bomber]['to_ids'].append(a['id'])\n else:\n app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':\n [a['id']], 'np_ids': [], 'p_ids': []}\n if a['promised_date'] and a['promised_date'].date() >= datetime.now(\n ).date():\n app_logs[latest_bomber]['p_ids'].append(a['id'])\n all_p_apps.append(a)\n else:\n app_logs[latest_bomber]['np_ids'].append(a['id'])\n all_np_apps.append(a)\n return app_logs, all_np_apps, all_p_apps\n\n\ndef month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):\n apps = list(apps)\n np_apps = list(np_apps)\n random.shuffle(np_apps)\n apps_len = len(apps)\n np_apps_len = len(np_apps)\n end = 0\n all_app_precentage = 0\n partners = Partner.select().where(Partner.cycle == cycle, Partner.\n status == PartnerStatus.NORMAL.value)\n for p in partners:\n all_app_precentage += p.app_percentage\n for partner in partners:\n bombers = Bomber.select().where(Bomber.partner == partner.id, \n Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.\n value)\n bids = {b.id: b for b in bombers}\n if len(bids) == 0:\n logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))\n continue\n start = end\n if np_apps_len >= int(apps_len * all_app_precentage):\n end = start + int(apps_len * partner.app_percentage)\n else:\n end = start + int(np_apps_len * partner.app_percentage /\n all_app_precentage)\n partner_app = np_apps[start:end]\n dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)\n np_apps = np_apps[end:]\n return np_apps\n\n\ndef month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):\n sys_cycle = {(1): 'AB_TEST_C1A', (2): 'AB_TEST_C1B', (3): 'AB_TEST_C2',\n (4): 'AB_TEST_C3'}\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0\n )\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n bombers = bombers.where(Bomber.instalment == 0)\n bids = {b.id: b for b in bombers}\n if cycle == Cycle.C1A.value:\n np_ids = [a['id'] for a in np_apps]\n np = Application.update(status=ApplicationStatus.PROCESSING.value,\n ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids\n ).execute()\n bomber_app_logs = app_logs.get(cycle, {})\n out_param = {'application_ids': bomber_app_logs.get('to_ids', []),\n 'month_dispatch': 1, 'src_bomber_id': cycle}\n new_out_record(**out_param)\n in_param = {'cycle': cycle, 'application_ids': np_ids,\n 'dest_bomber_id': cycle}\n new_in_record(**in_param)\n bomber_app_logs['need_num'] = len(np_apps)\n bomber_app_logs['form_ids'] = np_ids\n bomber_app_logs['status'] = 1\n else:\n 
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)\n dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)\n\n\ndef dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,\n type=ApplicationType.CASH_LOAN.value):\n apps = list(apps)\n random.shuffle(apps)\n bids_list = list(bids.keys())\n if len(bids_list) <= 0:\n logging.info('get_dispatch_app_to_bomber no bids')\n return\n average_num = get_average_number(len(apps), len(bids_list))\n bomber_end = 0\n with db.atomic():\n for index, bid in enumerate(bids_list):\n current_bomber = bids.get(bid)\n bomber_app_logs = app_logs.get(bid, {})\n bomber_start = bomber_end\n bomber_end = bomber_start + average_num[index]\n bomber_apps = apps[bomber_start:bomber_end]\n from_p, from_np, from_ids, status = [], [], [], 0\n for ba in bomber_apps:\n promised_date = ba.get('promised_date')\n from_ids.append(ba['id'])\n if promised_date and promised_date.date() >= date.today():\n from_p.append(ba['id'])\n else:\n from_np.append(ba['id'])\n app_status = ApplicationStatus.AB_TEST.value\n if (cycle == Cycle.C1A.value and not out_partner and type ==\n ApplicationType.CASH_LOAN.value):\n app_status = ApplicationStatus.PROCESSING.value\n if from_p:\n p = Application.update(ptp_bomber=bid, latest_bomber=bid,\n status=app_status).where(Application.id << from_p).execute(\n )\n p_ids = bomber_app_logs.get('p_ids', []) + from_p\n bomber_app_logs['p_ids'] = p_ids\n if from_np:\n np = Application.update(latest_bomber=bid, ptp_bomber=None,\n status=ApplicationStatus.AB_TEST.value).where(\n Application.id << from_np).execute()\n np_ids = bomber_app_logs.get('np_ids', []) + from_np\n bomber_app_logs['np_ids'] = np_ids\n in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.\n partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}\n if type == ApplicationType.CASH_LOAN.value:\n out_param = {'src_bomber_id': bid, 'application_ids':\n bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}\n new_out_record(**out_param)\n new_in_record(**in_param)\n else:\n out_and_in_record_instalment(**in_param)\n bomber_app_logs['status'] = 1\n need_num = bomber_app_logs.get('need_num', 0) + average_num[index]\n bomber_app_logs['need_num'] = need_num\n all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids\n bomber_app_logs['form_ids'] = all_form_ids\n if not out_partner:\n continue\n try:\n DispatchApp.delete().where(DispatchApp.application.in_(\n from_ids)).execute()\n dispatch_ins = [{'application': id, 'partner':\n current_bomber.partner_id, 'bomber': bid, 'status':\n DisAppStatus.NORMAL.value} for id in from_ids]\n DispatchApp.insert_many(dispatch_ins).execute()\n except Exception as e:\n logging.info(\n 'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str\n (e), bid, from_ids))\n\n\n<mask token>\n\n\ndef calc_entry_time(overdue_days):\n app_entry_time = {}\n overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],\n 'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}\n for key, value in overdue_entry.items():\n if value[0] <= overdue_days <= value[1]:\n app_entry_time[key] = datetime.now()\n else:\n app_entry_time[key] = None\n return app_entry_time\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_BOMBER_OVERDUE)\ndef summary_bomber_overdue_everyday(payload, msg_id):\n cycle_list = Cycle.values()\n which_day = date.today()\n for cycle in cycle_list:\n apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,\n ApplicationR.ptp_bomber, ApplicationR.overdue_days,\n ApplicationR.promised_date, 
ApplicationR.follow_up_date,\n ApplicationR.external_id, OverdueBillR.status, OverdueBillR.\n periods, OverdueBillR.sub_bill_id).join(OverdueBillR,\n JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id\n ).where(ApplicationR.status != ApplicationStatus.REPAID.value, \n ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()\n bomber_overdue_list = []\n for app in apps:\n status = app.get('status')\n if status == ApplicationStatus.REPAID.value:\n continue\n ptp_bomber = app.get('ptp_bomber')\n promised_date = app.get('promised_date')\n follow_up_date = app.get('follow_up_date')\n if not promised_date or promised_date.date() < date.today():\n ptp_bomber = promised_date = None\n if not follow_up_date or follow_up_date.date() < date.today():\n follow_up_date = None\n overdue_dict = {'collection_id': app.get('id'), 'external_id':\n app.get('external_id'), 'sub_bill_id': app.get(\n 'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app\n .get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':\n ptp_bomber, 'promised_date': promised_date,\n 'follow_up_date': follow_up_date, 'which_day': which_day,\n 'overdue_days': app.get('overdue_days')}\n bomber_overdue_list.append(overdue_dict)\n try:\n if bomber_overdue_list:\n with db.atomic():\n for index in range(0, len(bomber_overdue_list), 1000):\n insert_list = bomber_overdue_list[index:index + 1000]\n BomberOverdue.insert_many(insert_list).execute()\n except Exception as e:\n logging.info(\n 'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %\n (cycle, str(which_day), str(e)))\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)\ndef today_ptp_auto_call_switch(payload, msg_id):\n today = datetime.today().date()\n next_day = today + timedelta(days=1)\n apps = ApplicationR.select(ApplicationR.latest_bomber).where(\n ApplicationR.promised_date < next_day, ApplicationR.promised_date >=\n today, ApplicationR.promised_date.is_null(False), ApplicationR.\n status != ApplicationStatus.REPAID.value, ApplicationR.cycle <\n Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(\n ApplicationR.latest_bomber)\n bids = [a.latest_bomber_id for a in apps]\n if not bids:\n return\n q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(\n BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids\n ).execute()\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)\ndef update_today_switch_every_day(payload, msg_id):\n q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(\n BomberPtp.auto_ext.is_null(False)).execute()\n\n\n<mask token>\n",
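The step-2 variant above repeatedly calls three helpers whose definitions are elided behind <mask token>: run_one_sql (used where a single aggregate value is expected, e.g. `all_money = run_one_sql(all_money_sql)`), run_all_sql (used where rows are iterated and unpacked positionally), and get_average_number (used by dispatch_apps_to_bomber to split applications across bombers). The sketch below is an assumption inferred purely from those call sites, not the original code: the real helpers take only a sql argument (the db_conn parameter here is added for illustration, since app_merge in the next variant obtains cursors via readonly_db.get_cursor()), and the Decimal(0) fallback is likewise guessed from callers that add to and divide by the result.

from decimal import Decimal


def run_one_sql_sketch(sql, db_conn):
    # Hypothetical helper: fetch the first column of the first row of an
    # aggregate query. The Decimal(0) fallback is an assumption, made
    # because callers add to and divide by the return value.
    cursor = db_conn.cursor()
    cursor.execute(sql)
    row = cursor.fetchone()
    cursor.close()
    return row[0] if row and row[0] is not None else Decimal(0)


def run_all_sql_sketch(sql, db_conn):
    # Hypothetical helper: return every row as a tuple so callers can
    # unpack positionally, e.g. `for bomber_id, cnt in rows`.
    cursor = db_conn.cursor()
    cursor.execute(sql)
    rows = cursor.fetchall()
    cursor.close()
    return rows


def get_average_number_sketch(total, people):
    # Hypothetical even-split allocator matching how dispatch_apps_to_bomber
    # consumes get_average_number: a list of per-bomber quotas that sums to
    # total, e.g. 10 apps over 3 bombers -> [4, 3, 3].
    base, extra = divmod(total, people)
    return [(base + 1 if i < extra else base) for i in range(people)]

Several call sites above scale the amounts returned from these queries by 1e6 (e.g. `repay_money = repay[0] / 1000000`), so the helpers appear to hand back raw micro-unit sums untouched; the sketch preserves that by returning driver values as-is.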
"step-3": "<mask token>\n\n\n@action(MessageAction.BOMBER_HEALTH_CHECK)\ndef health_check(payload, msg_id):\n pass\n\n\n<mask token>\n\n\ndef dpd1_process(lst):\n \"\"\"已废弃的方法\"\"\"\n if not lst:\n return\n for key, l in lst.items():\n rule = getattr(BeforeInBomber, key).value\n query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)\n ).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(\n rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.\n call_success()))\n success_set = {i.loanid for i in query}\n failed_list = list(set(l) - success_set)\n post_params = {'$and': rule.get('$and'), 'app_list': failed_list}\n resp = Hyperloop().post('/bomber/score/verify', json=post_params)\n if not resp.ok:\n logging.error('hyperloop score verification failed: %s, %s',\n str(resp.status_code), str(resp.text))\n logging.error('hyperloop score verification failed: %s', str(\n post_params))\n continue\n logging.debug('hyperloop score verification success: %s', resp.content)\n resp_json = resp.json()\n app_list = resp_json['data']\n if not app_list:\n continue\n for item in app_list:\n if random.randint(0, 5) == 1:\n send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':\n int(item)})\n\n\n@action(MessageAction.GET_IVR)\ndef get_ivr(payload, msg_id):\n logging.warning('start get_ivr')\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'DPD1-3_INTO_IVR').first()\n now = date.today()\n if sys_config and sys_config.value:\n start = now - timedelta(days=3)\n else:\n start = now\n end = now + timedelta(days=4)\n item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()\n if not item:\n AutoIVR.delete().execute()\n current_page = 0\n elif item.current_page >= item.total_page:\n return\n else:\n current_page = item.current_page\n auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,\n 'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,\n 'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':\n 10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,\n 'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,\n 'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,\n 'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,\n 'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,\n 'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,\n 'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,\n 'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,\n 'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,\n 'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,\n 'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':\n 101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,\n 'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,\n 'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,\n 'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}\n current_page += 1\n with db.atomic() as transaction:\n while True:\n bill_service = BillService()\n ivr_action = bill_service.ivr_pages(page=current_page,\n page_size=500, start_time=utc_datetime(str(start)),\n end_time=utc_datetime(str(end)))\n result = ivr_action['result']\n page_size = int(ivr_action.get('page_size', 0))\n total_page = int(ivr_action.get('total_page', 0))\n insert_args = []\n for a in result:\n due_at = no_utc_datetime(a['due_at'])\n days = (due_at.date() - now).days\n if days == 2:\n continue\n if days > 0:\n time = str(days)\n else:\n time = str(days).replace('-', 
'PDP')\n key = a['app_name'] + str(a['su']) + time\n group = auto_ivr.get(key)\n user_id = a['user_id']\n try:\n user_resp = AccountService().get_user(path_params={\n 'user_id': user_id})\n if str(user_resp['mobile_no']) == str(a['user_mobile_no']):\n numbers = a['user_mobile_no']\n else:\n numbers = a['user_mobile_no'] + ',' + user_resp.get(\n 'mobile_no')\n except:\n logging.error('Request Account Service Error.')\n numbers = a['user_mobile_no']\n insert_args.append({'application_id': a['id'], 'numbers':\n numbers, 'group': group, 'user_id': user_id})\n AutoIVR.insert_many(insert_args).execute()\n if current_page == 1:\n IVRActionLog.create(total_page=total_page, proc_date=now,\n page_size=page_size, current_page=current_page)\n item = IVRActionLog.get(IVRActionLog.proc_date == now)\n else:\n item.current_page = current_page\n item.page_size = page_size\n item.total_page = total_page\n item.save()\n transaction.commit()\n current_page += 1\n if current_page > int(total_page):\n break\n if sys_config and sys_config.value:\n try:\n classfiy_dpd_ptp_apps()\n except Exception as e:\n logging.error('dpd1-3_test_error:%s' % str(e))\n\n\ndef ivr_t2_test():\n t2_groups = [39, 40, 41, 42, 43, 44]\n ivr_test_proportion = 0.2\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'IVR_TEST_PROPORTION').first()\n if sys_config and sys_config.value:\n ivr_test_proportion = float(sys_config.value)\n t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.\n status == AutoIVRStatus.AVAILABLE.value)\n t2_dict = defaultdict(list)\n for ivr in t2_ivrs:\n t2_dict[ivr.group].append(ivr.id)\n test_ivr_ids = []\n for group, ivr_ids in t2_dict.items():\n number = ceil(len(ivr_ids) * ivr_test_proportion)\n test_ivr_ids += ivr_ids[:number]\n if not test_ivr_ids:\n return\n q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.\n group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.APP_MERGE)\n@deprecated(version='1.0', reason='This function will be removed soon')\ndef app_merge(payload, msg_id):\n sql = \"\"\"\n select *\n from (\n select a.id as id\n from dashboard.application as a\n inner join repayment.bill2 as b on b.external_id = a.id\n where not exists (\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.app = 'DanaCepat'\n and a.is_first_loan = 1\n and a.apply_at < '2018-08-23 20:50:00'\n and b.overdue_days between 1 and 3\n and b.status != 2) result\n where not exists (\n select 1\n from bomber.application as a\n where a.cycle = 1\n and a.status = 4\n and a.id = result.id\n )\n \"\"\"\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n new_data = cursor.fetchall()\n cursor.close()\n if new_data:\n bomber = [103, 104]\n for d in new_data:\n app_id = {'id': d[0]}\n application_overdue(app_id, None)\n Application.update(status=ApplicationStatus.AB_TEST.value,\n latest_bomber=random.choice(bomber), ptp_bomber=None).where(\n Application.id == d[0]).execute()\n logging.warning('add new app success')\n ptp = date.today() - timedelta(days=1)\n del_sql = (\n \"\"\"\n select a.id\n from bomber.application as a\n where exists(\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.cycle = 1\n and a.status = 4\n and (a.promised_date is null or a.promised_date < \"%s\")\n \"\"\"\n % ptp)\n cursor = readonly_db.get_cursor()\n cursor.execute(del_sql)\n del_date = 
cursor.fetchall()\n cursor.close()\n if del_date:\n return\n ids = list()\n for d in del_date:\n ids.append(d[0])\n Application.update(status=ApplicationStatus.UNCLAIMED.value,\n latest_bomber=None).where(Application.id << ids).execute()\n\n\n@action(MessageAction.APPLICATION_BOMBER)\ndef application_overdue(payload, msg_id):\n application_id = payload['id']\n sub_bill_id = payload['bill_sub_id']\n local_app = Application.select().where(Application.external_id ==\n application_id).order_by(Application.finished_at).first()\n if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:\n logging.info('application %s overdue, already exists', application_id)\n add_contact(local_app)\n return\n if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:\n overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==\n sub_bill_id, OverdueBillR.external_id == application_id)\n if overdue_bill.exists():\n logging.info(\n 'application %s,sub_bill_id %s overdue, already exists' % (\n application_id, sub_bill_id))\n return\n try:\n sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])\n sub_bill = sub_bill[0]\n except Exception:\n logging.error(\n 'application %s overdue, get sub_bill info failed:Request To repayment Error'\n , application_id)\n return\n if sub_bill['status'] == 2:\n logging.error('application %s overdue, but bills already cleared',\n application_id)\n return\n overdue_days = sub_bill.get('overdue_days', 0)\n if overdue_days == 0:\n logging.info('application {} no overdue'.format(str(application_id)))\n return\n gold_eye = GoldenEye().get('/applications/%s' % application_id)\n if not gold_eye.ok:\n logging.error('get application %s failed: Request to GoldenEye.',\n application_id)\n return\n gold_app = gold_eye.json().get('data')\n user_id = gold_app['user_id']\n apply_history = Dashboard().get('/users/%s/apply-history' % user_id)\n if not apply_history.ok:\n logging.error(\n 'get user %s apply history failed: Request to Dashboard Failed.',\n user_id)\n return\n history = apply_history.json().get('data')\n loan_success_times = len([(1) for i in history if i['status'] in [80, \n 90, 100, 70] and i['id'] != gold_app['id']])\n id = application_id\n type = ApplicationType.CASH_LOAN.value\n bill_id = sub_bill.get('bill_id')\n amount = sub_bill.get('amount')\n amount_net = sub_bill.get('amount_net')\n interest_rate = sub_bill.get('interest_rate')\n overdue_days = sub_bill.get('overdue_days')\n origin_due_at = sub_bill.get('origin_due_at')\n sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,\n 'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),\n 'overdue_days': overdue_days, 'origin_due_at': origin_due_at,\n 'amount': amount, 'amount_net': amount_net, 'interest_rate':\n interest_rate, 'external_id': application_id}\n if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:\n if local_app and local_app.status != ApplicationStatus.REPAID.value:\n sub_overdue_bill['collection_id'] = local_app.id\n local_app.amount += amount\n local_app.amount_net += amount_net\n local_app.save()\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('application %s,sub_bill_id:%s overdue created' %\n (application_id, sub_bill_id))\n return\n else:\n id = idg()\n type = ApplicationType.CASH_LOAN_STAGING.value\n sub_overdue_bill['collection_id'] = id\n ptp_info = BombingHistory.filter(BombingHistory.application == id).first()\n promised_amount = ptp_info and ptp_info.promised_amount\n promised_date = ptp_info 
and ptp_info.promised_date\n application = Application.create(id=id, user_id=gold_app['user_id'],\n user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[\n 'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],\n contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(\n 'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=\n birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(\n gold_app.get('id_ektp')), profile_province=(gold_app.get(\n 'profile_province') or {}).get('name'), profile_city=(gold_app.get(\n 'profile_city') or {}).get('name'), profile_district=(gold_app.get(\n 'profile_district') or {}).get('name'), profile_residence_time=\n gold_app.get('profile_residence_time'), profile_residence_type=\n gold_app.get('profile_residence_type'), profile_address=gold_app.\n get('profile_address'), profile_education=gold_app.get(\n 'profile_education'), profile_college=(gold_app.get(\n 'profile_college') or {}).get('name'), job_name=gold_app.get(\n 'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get\n ('job_bpjs'), job_user_email=gold_app.get('job_user_email'),\n job_type=gold_app.get('job_type'), job_industry=gold_app.get(\n 'job_industry'), job_department=gold_app.get('job_department'),\n job_province=(gold_app.get('job_province') or {}).get('name'),\n job_city=(gold_app.get('job_city') or {}).get('name'), job_district\n =(gold_app.get('job_district') or {}).get('name'), job_address=\n gold_app.get('job_address'), amount=amount, amount_net=amount_net,\n interest_rate=interest_rate, term=gold_app.get('term'),\n origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=\n sub_bill.get('repay_at'), loan_success_times=loan_success_times,\n arrived_at=datetime.now(), follow_up_date=datetime.now(),\n promised_amount=promised_amount, promised_date=promised_date,\n external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=\n datetime.now())\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('overdue application %s created', application_id)\n Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,\n status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)\n add_contact(application)\n\n\ndef add_contact(application):\n logging.info('start add contact for application: %s', application.id)\n contacts = Contact.filter(Contact.user_id == application.user_id)\n existing_numbers = {contact.number for contact in contacts}\n insert_contacts = list()\n mon_insert_contact = {}\n user_mobile_no = number_strip(application.user_mobile_no)\n if user_mobile_no and user_mobile_no not in existing_numbers:\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': user_mobile_no, 'relationship':\n Relationship.APPLICANT.value, 'source': 'apply info',\n 'real_relationship': Relationship.APPLICANT.value})\n existing_numbers.add(number_strip(application.user_mobile_no))\n extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id\n )\n if not extra_phone.ok:\n extra_phone = []\n logging.error('get user %s extra contacts failed', application.user_id)\n else:\n extra_phone = extra_phone.json()['data']\n if extra_phone:\n for i in extra_phone:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': number, 'relationship':\n Relationship.APPLICANT.value, 'source': 'extra phone',\n 
'real_relationship': Relationship.APPLICANT.value})\n key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n ec_contact = []\n contact = json.loads(application.contact or '[]')\n for i in contact:\n if number_strip(i['mobile_no']\n ) not in existing_numbers and number_strip(i['mobile_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['mobile_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['mobile_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['mobile_no']))\n if i['type'] != 1:\n continue\n if number_strip(i['tel_no']) not in existing_numbers and number_strip(i\n ['tel_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['tel_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['tel_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['tel_no']))\n if ec_contact:\n Contact.insert_many(ec_contact).execute()\n if all((application.job_tel, number_strip(application.job_tel), \n number_strip(application.job_tel) not in existing_numbers)):\n insert_contacts.append({'user_id': application.user_id, 'name':\n None, 'number': number_strip(application.job_tel),\n 'relationship': Relationship.COMPANY.value, 'source':\n 'basic info job_tel', 'real_relationship': Relationship.COMPANY\n .value})\n key = user_mobile_no, number_strip(application.job_tel\n ), ContactType.C_BASIC_INFO_JOB_TEL.value\n mon_insert_contact[key] = 1, 0, None\n existing_numbers.add(number_strip(application.job_tel))\n sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %\n application.external_id)\n if not sms_contacts.ok:\n sms_contacts = []\n logging.info('get user %s sms contacts failed', application.external_id\n )\n else:\n sms_contacts = sms_contacts.json()['data']\n if sms_contacts:\n for i in sms_contacts:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.SUGGESTED.value, 'source': 'sms contacts',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value\n mon_insert_contact[key] = 1, 0, i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n cf = GoldenEye().get('/applications/%s/call/frequency' % application.\n external_id)\n if not cf.ok:\n call_frequency = []\n logging.error('get application %s call frequency error',\n application.external_id)\n else:\n call_frequency = cf.json()['data']\n insert_contacts = []\n fm = GoldenEye().get('/applications/%s/contact/family-member' %\n application.external_id)\n if not fm.ok:\n family = []\n logging.error('get application %s family-member info error',\n application.external_id)\n else:\n family = fm.json()['data']\n if family:\n for i in family:\n if not i.get('number'):\n 
logging.info('family member %s' % str(i))\n continue\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n logging.info('family members: %s' % str(i))\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.FAMILY.value, 'source': FamilyContactType.\n CALLEC.value, 'total_count': i.get('total_count', 1),\n 'total_duration': i.get('total_duration', 0),\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number, ContactType.F_CALL_EC.value\n mon_insert_contact[key] = i.get('total_count', 1), i.get(\n 'total_duration', 0), i['name'][:128]\n existing_numbers.add(number)\n mon_update_contact = {}\n if call_frequency:\n with db.atomic():\n count = 1\n for i in call_frequency:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n Contact.update(total_count=i['total_count'],\n total_duration=i['total_duration']).where(Contact.\n number == number, Contact.user_id == application.\n user_id)\n key = user_mobile_no, number\n mon_update_contact[key] = i['total_count'], i[\n 'total_duration']\n continue\n if count < 6:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.FAMILY.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': FamilyContactType.\n CALLTOP5.value, 'real_relationship': Relationship.\n FAMILY.value})\n count += 1\n key = user_mobile_no, number, ContactType.F_CALL_TOP5.value\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n else:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.SUGGESTED.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': 'call frequency',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = (user_mobile_no, number, ContactType.\n S_CALL_FREQUENCY.value)\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n next_apply_list = AccountService().add_contact(application.user_id)\n for next_apply in next_apply_list:\n number = number_strip(str(next_apply))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n SUGGESTED.value, source='online profile phone',\n real_relationship=Relationship.SUGGESTED.value)\n key = (user_mobile_no, number, ContactType.\n S_ONLINE_PROFILE_PHONE.value)\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %\n application.user_id)\n if not next_applicant.ok:\n next_applicant = []\n logging.error('get user %s dual_contact contacts failed' %\n application.user_id)\n else:\n next_applicant = next_applicant.json()['data']\n if next_applicant:\n for i in next_applicant:\n number = number_strip(str(i))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=\n application.user_name, number=number, relationship=\n Relationship.APPLICANT.value, source='apply info',\n real_relationship=Relationship.APPLICANT.value)\n key = user_mobile_no, number, 
ContactType.A_APPLY_INFO.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n numbers = []\n try:\n numbers = AccountService().ktp_number(path_params={'user_id':\n application.user_id})\n except Exception as e:\n logging.info('request ktp numbers failed %s' % str(e))\n for n in numbers:\n number = number_strip(str(n))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n APPLICANT.value, source='ktp number', real_relationship=\n Relationship.APPLICANT.value)\n key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n try:\n ecs = GoldenEye().get('/applications/%s/contact/ec' % application.\n external_id)\n except Exception as e:\n logging.info('request ec-member error: %s' % str(e))\n try:\n if not ecs.ok:\n ec = []\n logging.info('get application %s ec-member info error',\n application.external_id)\n else:\n ec = ecs.json()['data']\n if ec:\n for e in ec:\n number = number_strip(e['numbers'])[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=e[\n 'name'][:128], number=number, relationship=\n Relationship.FAMILY.value, source=FamilyContactType\n .CONTACTEC.value, real_relationship=Relationship.\n FAMILY.value)\n key = (user_mobile_no, number, ContactType.F_CONTACT_EC\n .value)\n mon_insert_contact[key] = 1, 0, e['name'][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add ec_member error:%s' % str(e))\n try:\n mn = GoldenEye().get('/applications/%s/contact/my_number' %\n application.external_id)\n except Exception as e:\n logging.info('request my_number error: %s' % str(e))\n try:\n if not mn.ok:\n my = []\n logging.info('get application %s my_number info error',\n application.external_id)\n else:\n my = mn.json()['data']\n if my:\n for m in my:\n number = number_strip(m)[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=my[m][\n :128], number=number, relationship=Relationship.\n SUGGESTED.value, source='my number',\n real_relationship=Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_MY_NUMBER.value\n mon_insert_contact[key] = 1, 0, my[m][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add my_member error:%s' % str(e))\n try:\n cn = GoldenEye().get('/applications/%s/contact/company-number' %\n application.external_id)\n except Exception as e:\n logging.info('request company-number error: %s' % str(e))\n try:\n if not cn.ok:\n cn = []\n logging.info('get application %s company_number info error',\n application.external_id)\n else:\n cn = cn.json()['data']\n if cn:\n for c in cn:\n number = c\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=cn[c][\n :128], number=number, relationship=Relationship.\n COMPANY.value, source='company', real_relationship=\n Relationship.COMPANY.value)\n key = user_mobile_no, number, ContactType.C_COMPANY.value\n mon_insert_contact[key] = 1, 0, cn[c][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add company_member error:%s' % 
str(e))\n try:\n ol = AccountService().other_login_contact(userId=application.user_id)\n except Exception as e:\n logging.error('request other_login error: %s' % e)\n ol = {}\n try:\n for o in ol:\n number = number_strip(o)\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=ol[o][:128\n ], number=number, relationship=Relationship.SUGGESTED.\n value, source='other_login', real_relationship=\n Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value\n mon_insert_contact[key] = 1, 0, ol[o][:128]\n except Exception as e:\n logging.error('add other_login number error:%s' % e)\n logging.info('add contact for application %s finished', application.id)\n if mon_insert_contact or mon_update_contact:\n send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {\n 'user_mobile_no': user_mobile_no, 'insert_contact': str(\n mon_insert_contact), 'update_contact': str(mon_update_contact),\n 'user_id': application.user_id, 'name': application.user_name})\n\n\n<mask token>\n\n\ndef get_contact_from_mongo(number):\n if not number:\n return []\n query = TotalContact.objects(src_number=number, source__in=TotalContact\n .available()).order_by('source')\n lst = []\n for c in query:\n relation = TotalContact.relationship(c.source)\n if relation == -1:\n continue\n source = TotalContact.str_source(c.source)\n if not source:\n continue\n lst.append({'related_number': c.dest_number, 'source': source,\n 'is_calc': c.is_calc, 'total_count': c.total_count,\n 'total_duration': c.total_duration, 'relation': relation,\n 'name': c.dest_name})\n return lst\n\n\n<mask token>\n\n\ndef check_key_not_none(payload, keys):\n for key in keys:\n if payload.get(key) is None:\n logging.error('Missing args {}'.format(str(key)))\n return False\n return True\n\n\n@action(MessageAction.BILL_PAID)\ndef bill_paid(payload, msg_id):\n validate = check_key_not_none(payload, ['external_id', 'late_fee_part',\n 'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])\n if not validate:\n logging.error('payload key not fully pass in.')\n return\n external_id = payload['external_id']\n late_fee_part = Decimal(payload['late_fee_part'])\n principal_part = Decimal(payload['principal_part'])\n paid_at = payload['paid_at']\n partner_bill_id = payload['partner_bill_id']\n logging.debug(\n 'application %s paid principal part %s, paid late fee part %s',\n external_id, principal_part, late_fee_part)\n application = Application.filter(Application.external_id == external_id\n ).order_by(-Application.created_at).first()\n if not application:\n logging.info('application %s paid, not found application', external_id)\n return\n sub_bill_id = payload['bill_sub_id']\n overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==\n application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()\n if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not\n overdue_bill):\n logging.info('bill sub not in bomber %s', sub_bill_id)\n return\n with db.atomic():\n repay_at = str_no_utc_datetime(payload['latest_repay_at'])\n Application.update(repay_at=repay_at).where(Application.id ==\n application.id).execute()\n RepaymentLog.create(application=application.id, is_bombed=True,\n current_bomber=application.latest_bomber_id, cycle=application.\n cycle, principal_part=principal_part, late_fee_part=\n late_fee_part, repay_at=paid_at, ptp_bomber=application.\n ptp_bomber, latest_call=application.latest_call, periods=\n overdue_bill.periods if 
overdue_bill else None, overdue_bill_id\n            =overdue_bill.id if overdue_bill else None, partner_bill_id=\n            partner_bill_id)\n        phone_status = PhoneStatus.CONNECTED.value\n        real_relationship = RealRelationship.user_values()\n        commit = CallActionCommit.NO.value\n        number = CallActions.select(CallActions.number).where(CallActions.\n            phone_status == phone_status, CallActions.real_relationship <<\n            real_relationship, CallActions.commit == commit, CallActions.\n            application == application.id).order_by(-CallActions.created_at\n            ).first()\n        if number:\n            Contact.update(call_priority=PriorityStatus.REPAY.value).where(\n                Contact.user_id == application.user_id, Contact.\n                call_priority == PriorityStatus.LAST.value).execute()\n            Contact.update(call_priority=PriorityStatus.LAST.value).where(\n                Contact.user_id == application.user_id, Contact.number ==\n                number.number).execute()\n        if not application.latest_bomber_id:\n            return\n        Inbox.create(title='application %s,sub_bill_id %s repaid' % (\n            application.external_id, sub_bill_id), content=\n            'application %s,sub_bill_id %s repaid' % (application.\n            external_id, sub_bill_id), receiver=application.\n            latest_bomber_id or application.last_bomber_id, category=\n            InboxCategory.REPAID.value)\n\n\n@action(MessageAction.BILL_RELIEF)\ndef bill_relief(payload, msg_id):\n    \"\"\"Deprecated.\"\"\"\n    bill = payload['head_bill']\n    repay_at = str_no_utc_datetime(bill['latest_repay_at'])\n    updated_row = Application.update(repay_at=repay_at).where(Application.\n        id == bill['external_id']).execute()\n    logging.info('application %s bill relief done', bill['external_id'])\n    return updated_row\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)\ndef calc_overdue_days_over(payload, msg_id):\n    \"\"\"\n    Call by BOMBER_CALC_SUMMARY\n    :param payload:\n    :param msg_id:\n    :return:\n    \"\"\"\n    now = fn.NOW()\n    origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n    overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n    query = Application.update(overdue_days=overdue_days).where(Application\n        .status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.\n        UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.\n        overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)\n    updated_rows_count = query.execute()\n    logging.info('calc overdue days done, updated count: %s',\n        updated_rows_count)\n    try:\n        calc_overdue_days_over_instalment()\n    except Exception as e:\n        logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))\n    apps = Application.filter(Application.status << [ApplicationStatus.\n        UNCLAIMED.value, ApplicationStatus.PROCESSING.value,\n        ApplicationStatus.AB_TEST.value], Application.overdue_days > 95, \n        Application.promised_date.is_null(True) | (fn.DATE(Application.\n        promised_date) < datetime.today().date()))\n    ids = [i.id for i in apps]\n    for idx in range(0, len(ids), 100):\n        send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {\n            'application_list': ids[idx:idx + 100]})\n    send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n\n\ndef calc_overdue_days_over_instalment():\n    now = fn.NOW()\n    origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n    overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n    sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n        ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n    for status in sub_bill_status_list:\n        query = OverdueBill.update(overdue_days=overdue_days).where(\n            OverdueBill.status == status, OverdueBill.overdue_days > 95)\n        
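# Note on the overdue-days expression used above: DATEDIFF(now, due_at)\n        # is negative while a bill is not yet due, and GREATEST(..., 0) clamps\n        # it, so overdue_days never goes below zero. A hedged Python sketch of\n        # the same rule (illustration only, not part of this job):\n        #     overdue_days = max((date.today() - origin_due_at).days, 0)\n        #     e.g. due 2019-01-12 on 2019-01-10 -> max(-2, 0) -> 0\n        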
updated_rows_count = query.execute()\n logging.info(\n 'calc_overdue_days_over_instalment done,count:%s,status:%s' % (\n updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.collection_id,\n OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on\n =OverdueBill.collection_id == Application.id).where(Application\n .status == status, Application.type == ApplicationType.\n CASH_LOAN_STAGING.value)\n app_update = {}\n for ob in overdue_bills:\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)\ndef calc_overdue_days(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query_unclaimed = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.UNCLAIMED.value, \n Application.overdue_days <= 95, Application.type == ApplicationType\n .CASH_LOAN.value)\n updated_rows_count_unclaimed = query_unclaimed.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_unclaimed)\n query_processing = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.PROCESSING.value, \n Application.overdue_days <= 95, Application.type == ApplicationType\n .CASH_LOAN.value)\n updated_rows_count_processing = query_processing.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_processing)\n query_test = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.AB_TEST.value, Application.\n overdue_days <= 95, Application.type == ApplicationType.CASH_LOAN.value\n )\n updated_rows_count_test = query_test.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_test)\n calc_overdue_days_instalment()\n apps = Application.select(Application.id).where(Application.status << [\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value, ApplicationStatus.AB_TEST.value], Application.overdue_days <=\n 95, Application.promised_date.is_null(True) | (fn.DATE(Application.\n promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {\n 'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n Application.update(C1A_entry=datetime.now()).where(Application.status <<\n [ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value, ApplicationStatus.AB_TEST.value], Application.overdue_days == 4\n ).execute()\n\n\ndef calc_overdue_days_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n today_now_time = datetime.now()\n month_first_day = today_now_time.replace(day=1, hour=1, minute=30,\n second=0, 
microsecond=0)\n    for status in sub_bill_status_list:\n        query = OverdueBill.update(overdue_days=overdue_days).where(\n            OverdueBill.status == status, OverdueBill.overdue_days <= 95)\n        updated_rows_count = query.execute()\n        logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %\n            (updated_rows_count, status))\n        overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.\n            created_at, OverdueBill.collection_id, OverdueBill.overdue_days\n            ).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.\n            collection_id == Application.id).where(Application.status ==\n            status, Application.type == ApplicationType.CASH_LOAN_STAGING.value\n            )\n        app_update = {}\n        for ob in overdue_bills:\n            if (ob.status == ApplicationStatus.REPAID.value and ob.\n                created_at < month_first_day):\n                continue\n            if ob.collection_id not in app_update:\n                app_update[ob.collection_id] = ob.overdue_days\n            else:\n                ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n                app_update[ob.collection_id] = ob_days\n        for aid, a_days in app_update.items():\n            q = Application.update(overdue_days=a_days).where(Application.\n                id == aid).execute()\n        logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)\ndef automatic_escalation(payload, msg_id):\n    app_ids = payload.get('application_list', [])\n    if not app_ids:\n        return\n    apps = Application.select().where(Application.id.in_(app_ids), \n        Application.status != ApplicationStatus.REPAID.value)\n    for a in apps:\n        new_cycle = application_entry_different_calculations(a)\n        if a.overdue_days < 90:\n            logging.info(\n                'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'\n                .format(a.id, new_cycle, a.cycle, a.overdue_days))\n        if new_cycle > a.cycle:\n            with db.atomic():\n                if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle\n                    .C1B.value):\n                    bomber_id = (a.latest_bomber_id if a.latest_bomber_id else\n                        a.cycle)\n                    DispatchAppHistory.update(out_at=datetime.now(),\n                        out_overdue_days=a.overdue_days).where(\n                        DispatchAppHistory.application == a.id, \n                        DispatchAppHistory.bomber_id == bomber_id).execute()\n                Escalation.create(application=a.id, type=EscalationType.\n                    AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,\n                    current_cycle=a.cycle, escalate_to=new_cycle,\n                    current_bomber_id=a.latest_bomber)\n                dis_app_update = DispatchApp.update(status=DisAppStatus.\n                    ABNORMAL.value).where(DispatchApp.application == a.id)\n                dis_app_update.execute()\n                a.cycle = new_cycle\n                a.last_bomber = a.latest_bomber\n                a.status = ApplicationStatus.UNCLAIMED.value\n                a.latest_bomber = None\n                a.ptp_bomber = None\n                a.latest_call = None\n                a.called_times = 0\n                if new_cycle == Cycle.C1B.value:\n                    a.C1B_entry = datetime.now()\n                elif new_cycle == Cycle.C2.value:\n                    a.C2_entry = datetime.now()\n                elif new_cycle == Cycle.C3.value:\n                    a.C3_entry = datetime.now()\n                a.save()\n    logging.info('automatic escalation done')\n\n\ndef application_entry_different_calculations(app):\n    conf = {1: [1, 10], 2: [11, 30], 3: [31, 60], 4: [61, 90], 5: [91,\n        999999]}\n    for new_cycle, scopes in conf.items():\n        if scopes[0] <= app.overdue_days <= scopes[1]:\n            return new_cycle\n    return app.cycle\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY)\ndef cron_summary(payload, msg_id):\n    \"\"\"Deprecated.\"\"\"\n    employees = Bomber.select(Bomber, Role).join(Role)\n    summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,\n        'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,\n        'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':\n        0, 
'sms_sent': 0} for i in employees}\n now_date = date.today()\n cal_date = now_date - timedelta(days=1)\n claimed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('claimed')).where(fn.DATE(Application.\n claimed_at) == cal_date, Application.status << [ApplicationStatus.\n PROCESSING.value, ApplicationStatus.REPAID.value], Application.\n latest_bomber.is_null(False)).group_by(Application.latest_bomber)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False)).group_by(\n Application.latest_bomber)\n completed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('completed')).where(Application.\n latest_bombing_time.is_null(False), fn.DATE(Application.\n latest_bombing_time) == cal_date, Application.latest_bomber.is_null\n (False)).group_by(Application.latest_bomber)\n escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(\n Escalation.id).alias('escalated')).where(fn.DATE(Escalation.\n created_at) == cal_date, Escalation.type == EscalationType.\n AUTOMATIC.value, Escalation.current_bomber.is_null(False), \n Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation\n .current_bomber)\n transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).\n alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==\n cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(\n Transfer.operator)\n promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(\n BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.\n created_at) == cal_date, BombingHistory.result == BombingResult.\n HAS_PROGRESS.value).group_by(BombingHistory.bomber)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,\n RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.\n current_bomber)\n calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id\n ).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==\n cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)\n calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.\n record_id).alias('calls_connected')).where(fn.DATE(CallLog.\n time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==\n '1').group_by(CallLog.user_id)\n sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(\n ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_\n (ConnectType.sms()), ConnectHistory.created_at >= cal_date, \n ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)\n for i in claimed:\n summary[i.latest_bomber_id]['claimed'] += i.claimed\n for i in completed:\n summary[i.latest_bomber_id]['completed'] += i.completed\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n for i in escalated:\n summary[i.current_bomber_id]['escalated'] += i.escalated\n for i in transferred:\n summary[i.operator_id]['transferred'] += i.transferred\n for i in promised:\n summary[i.bomber_id]['promised'] += i.promised\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n 
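# The loop above rebinds the name amount_recovered from the query result\n        # to a per-row Decimal; that is safe in Python because the iterator was\n        # obtained before the first rebind. Hedged illustration with assumed\n        # row values (not real data):\n        #     principal_part=100, late_fee_part=20 -> the bomber's\n        #     'amount_recovered' grows by 120 in the statement below\n        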
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n    for i in calls_made:\n        summary[int(i.user_id)]['calls_made'] += i.calls_made\n    for i in calls_connected:\n        summary[int(i.user_id)]['calls_connected'] += i.calls_connected\n    for i in sms_sent:\n        summary[i.operator_id]['sms_sent'] += i.sms_sent\n    insert_args = []\n    for bomber_id, data in summary.items():\n        insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n            'claimed': data['claimed'], 'completed': data['completed'],\n            'cleared': data['cleared'], 'escalated': data['escalated'],\n            'transferred': data['transferred'], 'promised': data['promised'\n            ], 'amount_recovered': data['amount_recovered'], 'calls_made':\n            data['calls_made'], 'calls_connected': data['calls_connected'],\n            'sms_sent': data['sms_sent'], 'date': cal_date})\n    if insert_args:\n        Summary.insert_many(insert_args).execute()\n    cycle_args = []\n    escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(\n        Escalation.id).alias('escalated_in')).where(Escalation.status ==\n        ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==\n        cal_date).group_by(Escalation.escalate_to)\n    for i in escalated_in:\n        cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.\n            escalated_in, 'date': cal_date})\n    amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM\n        (RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n        RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n        RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)\n    for i in amount_recovered_total:\n        amount_recovered_total = i.principal_part + i.late_fee_part\n        cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':\n            amount_recovered_total, 'date': cal_date})\n    if cycle_args:\n        Summary.insert_many(cycle_args).execute()\n    logging.info('cal summary done')\n    send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY2)\ndef cron_summary2(payload, msg_id):\n    \"\"\"Deprecated; the scheduled job still runs, details to be confirmed.\"\"\"\n    cal_date = date.today() - timedelta(days=1)\n    employees = Bomber.select(Bomber, Role).join(Role)\n    auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,\n        AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')\n        ).where(fn.DATE(AutoCallActions.created_at) == cal_date)\n    amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n        SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n        RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n        RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.\n        is_null(False), RepaymentLog.is_bombed == True)\n    cleared = Application.select(Application.latest_bomber, fn.COUNT(\n        Application.id).alias('cleared')).where(fn.DATE(Application.\n        finished_at) == cal_date, Application.status == ApplicationStatus.\n        REPAID.value, Application.latest_bomber.is_null(False))\n    auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,\n        AutoCallActions.result)\n    amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)\n    cleared = cleared.group_by(Application.latest_bomber)\n    summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,\n        'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered': \n        0} for e in employees}\n    for a in auto_call_actions:\n        summary[a.bomber_id]['answered_calls'] += a.count\n        if a.result == AutoCallResult.PTP.value:\n            summary[a.bomber_id]['ptp'] += a.count\n        if a.result == AutoCallResult.FOLLOW_UP.value:\n            
summary[a.bomber_id]['follow_up'] += a.count\n if a.result == AutoCallResult.NOT_USEFUL.value:\n summary[a.bomber_id]['not_useful'] += a.count\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'answered_calls': data['answered_calls'], 'ptp': data['ptp'],\n 'follow_up': data['follow_up'], 'not_useful': data['not_useful'\n ], 'cleared': data['cleared'], 'amount_recovered': str(data[\n 'amount_recovered']), 'date': cal_date})\n if insert_args:\n Summary2.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_DISCOUNT_APPROVED)\ndef bomber_discount_approved(payload, msg_id):\n app_id = payload['id']\n msg_type = payload['msg_type']\n discount_to = payload['discount_to']\n effective_to = payload['effective_to']\n application = Application.filter(Application.id == app_id).first()\n if not application:\n logging.error(\n 'discount approved msg send failed application %s not found',\n app_id)\n return\n template = Template.select(Template.text, Template.app).where(Template.\n type == ConnectType.AUTO_SMS.value, Template.id << Template.\n get_auto_sms_tpl(msg_type), Template.app == application.app).first()\n if not template:\n logging.error('discount approved msg send failed template %s not found'\n , msg_type)\n return\n promised_date = None\n if application.promised_date:\n promised_date = application.promised_date.strftime('%d-%m-%Y')\n tpl_data = {'user_name': application.user_name, 'due_days': application\n .overdue_days, 'app_name': application.app, 'phone': application.\n user_mobile_no, 'cs_number': cs_number_conf.get(application.app,\n '02150202889'), 'promised_date': promised_date, 'discount_to':\n discount_to, 'effective_to': effective_to}\n content = template.text.format(**tpl_data)\n data_list = [{'receiver': '62' + application.user_mobile_no, 'content':\n content, 'title': ''}]\n send_sms(data_list, msg_type, application.app)\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST)\ndef bomber_auto_call_list(payload, msg_id):\n with db.atomic():\n bomber_dispatch_app()\n dispatch_instalment_app()\n dis_apps = DispatchApp.select(DispatchApp.application).where(\n DispatchApp.status == DisAppStatus.NORMAL.value)\n c1_apps = Application.select(Application.id, Application.cycle,\n Application.follow_up_date, Application.called_times).where(Application\n .status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.\n AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.\n is_rejected == False, Application.promised_date.is_null(True) | (fn\n .DATE(Application.promised_date) < datetime.today().date())).order_by(\n Application.overdue_days, Application.apply_at)\n dis_apps_ids = [da.application_id for da in dis_apps]\n insert_args = []\n for a in c1_apps:\n if a.id in dis_apps_ids:\n continue\n insert_args.append({'application': a.id, 'cycle': a.cycle,\n 'follow_up_date': a.follow_up_date, 'called_times': 1 if a.\n called_times else 0, 'description': 'init'})\n if not insert_args:\n logging.error('no application need auto call')\n with db.atomic():\n AutoCallList.delete().execute()\n for idx in range(0, len(insert_args), 100):\n AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()\n for idx in range(0, 
len(insert_args), 100):\n application_list = [i['application'] for i in insert_args[idx:idx +\n 100]]\n send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {\n 'application_list': application_list})\n logging.info('bomber generate auto call list finished')\n send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})\n\n\nclass ChangeBomberTool(object):\n\n @staticmethod\n def in_record(bomber_id, ids, bd):\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.\n alias('application_id'), R(str(bomber_id)).alias('bomber_id'),\n fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(\n 'expected_out_time'), Application.overdue_days.alias(\n 'entry_overdue_days')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << ids)\n Application.update(latest_bomber=bomber_id).where(Application.id.\n in_(ids)).execute()\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bd),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n @staticmethod\n def out_record(a, bd):\n _id = str(a.id)\n DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a\n .overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]\n .get('principal_paid')), out_late_fee_pending=bd[_id].get(\n 'late_fee') - bd[_id].get('late_fee_paid')).where(\n DispatchAppHistory.application == a.id, DispatchAppHistory.\n bomber_id == a.latest_bomber_id).execute()\n a.last_bomber = a.latest_bomber\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n a.save()\n\n @staticmethod\n def classify(l, b):\n if len(l) == 1:\n return l[0]\n _l = filter(lambda x: x['bomber'] != b, l)\n return min(_l, key=lambda x: len(x['ids']))\n\n\n<mask token>\n\n\ndef bomber_dispatch_app():\n try:\n c1a_dispatch_app()\n except Exception as e:\n logging.error('c1a_dispatch_app error:%s' % str(e))\n cycle = {(1): 10, (2): 30, (3): 60, (4): 90}\n apps = Application.select().where(fn.DATE(Application.C2_entry) == date\n .today(), Application.type == ApplicationType.CASH_LOAN.value)\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C2.value)\n apps_ids = [a.id for a in apps]\n dispatch_inserts = []\n start_index = 0\n apps_length = len(apps_ids)\n logging.warning('apps length %s' % str(apps_length))\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)\n gen = CycleIter([b.id for b in bombers])\n existing_list = []\n end_index = start_index + int(apps_length * p.app_percentage)\n logging.info('partner length %s' % str(end_index))\n if not apps_ids[start_index:end_index]:\n continue\n bills = BillService().bill_list(application_ids=apps_ids[\n start_index:end_index])\n bill_dict = {bill['application_id']: bill for bill in bills}\n for a_id in apps_ids[start_index:end_index]:\n bomber = average_gen(gen, existing_list)\n q = DispatchApp.delete().where(DispatchApp.application == a_id\n ).execute()\n dispatch_inserts.append({'application': a_id, 'bomber': bomber,\n 'partner': p.id})\n application = Application.select().where(Application.id == a_id\n ).first()\n application.latest_bomber = bomber\n application.status = ApplicationStatus.AB_TEST.value\n 
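# Each partner gets a contiguous slice of apps_ids sized by its\n            # app_percentage; start_index/end_index advance per partner, so the\n            # slices never overlap. Hedged arithmetic sketch with assumed\n            # numbers (not from this run):\n            #     apps_length == 1000, p.app_percentage == 0.3\n            #     -> end_index == start_index + int(1000 * 0.3) == start_index + 300\n            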
application.ptp_bomber = None\n            application.save()\n            day_next_cycle = cycle.get(application.cycle\n                ) - application.overdue_days\n            DispatchAppHistory.create(application=a_id, partner_id=p.id,\n                bomber_id=bomber, entry_at=datetime.now(),\n                entry_overdue_days=application.overdue_days,\n                entry_principal_pending=application.amount - Decimal(\n                bill_dict[a_id].get('principal_paid')),\n                entry_late_fee_pending=Decimal(bill_dict[a_id].get(\n                'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')\n                ), expected_out_time=date.today() + timedelta(days=\n                day_next_cycle))\n        start_index = end_index\n    with db.atomic():\n        for idx in range(0, len(dispatch_inserts), 100):\n            DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n    config = SystemConfig.prefetch(SCI.AB_TEST_C2)\n    c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)\n    c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)\n    c2 = apps_ids[start_index:]\n    if c2:\n        bills = BillService().bill_list(application_ids=c2)\n    else:\n        bills = []\n    bill_dict = {bill['application_id']: bill for bill in bills}\n    logging.info('c2 AB_test length: %s' % str(c2))\n    gen = CycleIter(c2_bomber)\n    existing_list = []\n    for c in c2:\n        bomber = average_gen(gen, existing_list)\n        application = Application.filter(Application.id == c).first()\n        application.status = ApplicationStatus.AB_TEST.value\n        application.latest_bomber = bomber\n        application.ptp_bomber = None\n        application.save()\n        day_next_cycle = 46 - application.overdue_days\n        DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at\n            =datetime.now(), entry_overdue_days=application.overdue_days,\n            entry_principal_pending=application.amount - bill_dict[c].get(\n            'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(\n            'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),\n            expected_out_time=date.today() + timedelta(days=day_next_cycle))\n    ab_test_other()\n\n\ndef c1a_dispatch_app():\n    today = datetime.today().date()\n    tomorrow = today + timedelta(days=1)\n    c1a_apps = Application.select().where(Application.status << [\n        ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n        value], Application.dpd1_entry >= today, Application.dpd1_entry <\n        tomorrow, Application.type == ApplicationType.CASH_LOAN.value)\n    all_aids = [a.id for a in c1a_apps]\n    partners = Partner.select().where(Partner.status == PartnerStatus.\n        NORMAL.value, Partner.cycle == Cycle.C1A.value)\n    end = 0\n    for p in partners:\n        bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n            is_del == 0)\n        start = end\n        end += int(len(all_aids) * p.app_percentage)\n        aids = all_aids[start:end]\n        bids = [b.id for b in bombers]\n        if not bids or not aids:\n            continue\n        average_number = get_average_number(len(aids), len(bids))\n        p_end = 0\n        for i, bid in enumerate(bids):\n            p_start = p_end\n            p_end += average_number[i]\n            b_aids = aids[p_start:p_end]\n            with db.atomic():\n                q = Application.update(latest_bomber=bid, status=\n                    ApplicationStatus.AB_TEST.value).where(Application.id <<\n                    b_aids).execute()\n                params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,\n                    'application_ids': b_aids, 'dest_bomber_id': bid}\n                new_in_record(**params)\n            try:\n                dispatch_inserts = []\n                for aid in b_aids:\n                    dispatch_inserts.append({'application': aid, 'bomber':\n                        bid, 'partner': p.id, 'status': DisAppStatus.NORMAL\n                        .value})\n                if dispatch_inserts:\n                    q = DispatchApp.insert_many(dispatch_inserts).execute()\n            except Exception as e:\n                logging.error('c1a dispatch: write to dispatch_app failed:%s' % str(e))\n\n\n<mask token>\n\n\ndef 
get_cash_bomber(bids, cycle):\n cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==\n 0, Bomber.instalment != cycle)\n cash_bids = [b.id for b in cash_bombers]\n return cash_bids\n\n\n<mask token>\n\n\ndef out_and_in_record_instalment(**kwargs):\n if not kwargs.get('application_ids'):\n return\n out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(\n DispatchAppHistory.application << kwargs['application_ids'],\n DispatchAppHistory.out_at.is_null(True)).execute()\n cycle_period = {(1): '10', (2): '30', (3): '60', (4): '90'}\n period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')\n kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(kwargs['dest_bomber_id'])).alias(\n 'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.\n alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).\n alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << kwargs[\n 'application_ids'])\n application_list = list(subquery)\n for idx in range(0, len(application_list), 50):\n applications = application_list[idx:idx + 50]\n app_ids = [i.application_id for i in applications]\n overdue_bills = OverdueBill.select().where(OverdueBill.\n collection_id << app_ids)\n sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]\n bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)\n insert_args = lambad_instalment_result(bill_list, applications)\n if not insert_args:\n continue\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef c1b_dispatch_in_record(**kwargs):\n app_ids = kwargs.get('apps')\n partner_id = kwargs.get('partner_id', 'null')\n bill_dict = kwargs.get('bill_dict')\n period = kwargs.get('period')\n bomber_id = kwargs.get('bomber_id')\n if not all([app_ids, partner_id, bill_dict, period]):\n return False\n bill_dict = {str(k): v for k, v in bill_dict.items()}\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().\n alias('entry_at'), Application.overdue_days.alias(\n 'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.id << app_ids)\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef check_call_history(application):\n app_create_at = application.created_at + timedelta(days=4)\n if datetime.today().date() > app_create_at.date():\n call_actions = CallActions.select().where(CallActions.type == 0, \n CallActions.application == application.id, CallActions.\n created_at > datetime.now() - timedelta(days=5))\n for call in call_actions:\n if call.phone_status == PhoneStatus.CONNECTED.value:\n return True\n return False\n return True\n\n\n@action(MessageAction.BOMBER_SCAVENGER)\ndef scavenger(payload, msg_id):\n scavenger_time = -60\n scavenger = 
SystemConfig.select().where(SystemConfig.key ==\n 'SCAVENGER_TIME').first()\n if scavenger and scavenger.value.isdigit():\n scavenger_time = -int(scavenger.value)\n update_auto_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value, description='scavenger').where(AutoCallList.status ==\n AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime\n .now() + timedelta(minutes=scavenger_time))\n count = update_auto_call_list.execute()\n logging.info('scavenger processed %s application', count)\n mail_box_scavenger_time = -30\n mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'MAIL_BOX_SCAVENGER_TIME').first()\n if mail_box_scavenger and mail_box_scavenger.value.isdigit():\n mail_box_scavenger_time = -int(mail_box_scavenger.value)\n update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.\n value, AutoCallList.updated_at < datetime.now() + timedelta(minutes\n =mail_box_scavenger_time))\n mail_box_count = update_mail_box_call_list.execute()\n logging.info('scavenger update mail box %s', mail_box_count)\n update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value\n ).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.\n updated_at < datetime.now() + timedelta(minutes=-30))\n ivr_result = update_auto_ivr.execute()\n logging.info('scavenger update %s ivr' % ivr_result)\n\n\n<mask token>\n\n\n@action(MessageAction.REPORT_BOMBER_COLLECTION)\ndef report_bomber_collection(payload, msg_id):\n start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)\n ).scalar()\n now = datetime.now()\n if start_date and str(start_date) == str(now)[:10]:\n return\n end_date = str(now + timedelta(days=1))[:10]\n start_date = str(now)[:10]\n dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))\n all_overdue_loan_sql1 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n GROUP BY 1;\n \"\"\"\n s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()\n d1 = OperatedDict(s_data1)\n all_overdue_loan_sql2 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE DATE(ba.follow_up_date) > CURDATE()\n AND ba.called_counts = 0\n GROUP BY 1;\n \"\"\"\n s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()\n d2 = OperatedDict(s_data2)\n overdue_loans_entered_into_predict_call_system_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE ba.called_counts >= 1\n GROUP BY 1;\n \"\"\"\n s_data3 = readonly_db.execute_sql(\n overdue_loans_entered_into_predict_call_system_sql).fetchall()\n d3 = OperatedDict(s_data3)\n loans_completed_sql = \"\"\"\n SELECT ba.cycle, COUNT(DISTINCT ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()\n d4 = OperatedDict(s_data4)\n connected_calls_automatic_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()\n d5 = OperatedDict(s_data5)\n connected_calls_manual_sql = \"\"\"\n SELECT bb.cycle, COUNT(bb.id)\n FROM bomber.bombing_history bb\n WHERE DATE(bb.created_at) = curdate()\n AND (bb.bomber_id < 150 OR bb.bomber_id > 200)\n GROUP BY bb.cycle;\n \"\"\"\n s_data6 = 
readonly_db.execute_sql(connected_calls_manual_sql).fetchall()\n    d6 = OperatedDict(s_data6)\n    logging.info('Directly get data from database successfully.')\n    c1 = d1 - d2\n    c2 = d3\n    c3 = c2 / c1\n    c4 = d4\n    c5 = c4 / c2\n    c6 = d5\n    c7 = c6 / c4\n    c8 = d6\n    c9 = OperatedDict(get_agent())\n    c10 = (c6 + c8) / c9\n    try:\n        c11 = average_call_duration_team(start_date, end_date)\n    except AttributeError:\n        c11 = {}\n    lst = []\n    for i in range(1, 5):\n        lst.append({'apply_date': start_date, 'cycle': dct[i],\n            'all_overdue_loan': c1.get(i, 0),\n            'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),\n            'of_overdue_loans_entered_into_predict_call_system': round(c3.\n            get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),\n            'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *\n            100, 1), 'connected_calls_automatic': c6.get(i, 0),\n            'connected_calls_automatic_completed_loans': round(c7.get(i, 0),\n            1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i, \n            0), 'average_calls_agent': round(c10.get(i, 0), 1),\n            'average_call_duration_team': round(c11.get(i, 0), 1)})\n    ReportCollection.insert_many(lst).execute()\n    logging.info('report_bomber_collection:Done!')\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)\ndef bomber_auto_call_list_record(payload, msg_id):\n    \"\"\"Keep one year of auto_call_list: drop the previous day's snapshot and append today's.\"\"\"\n    now = datetime.now()\n    if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):\n        date_sql = \"\"\"\n            SELECT DATE(created_at) FROM auto_call_list_record\n            GROUP BY DATE(created_at) limit 1\n        \"\"\"\n        del_date = db.execute_sql(date_sql).fetchone()[0]\n        del_sql = \"\"\"\n            DELETE FROM auto_call_list_record WHERE date(created_at) = %s\n        \"\"\"\n        db.execute_sql(del_sql, [del_date])\n    sql = \"\"\"\n        INSERT INTO auto_call_list_record\n        SELECT * FROM auto_call_list\n    \"\"\"\n    db.execute_sql(sql)\n    logging.info('bomber_auto_call_list_record done')\n\n\n<mask token>\n\n\ndef new_out_record(**kwargs):\n    if not kwargs['application_ids']:\n        return\n    DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.\n        bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.\n        application << kwargs['application_ids'], DispatchAppHistory.out_at\n        .is_null(True)).execute()\n    if kwargs.get('month_dispatch'):\n        return\n    try:\n        Application.update(ptp_bomber=None).where(Application.id << kwargs[\n            'application_ids']).execute()\n    except Exception as e:\n        logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[\n            'application_ids'], str(e)))\n\n\n<mask token>\n\n\ndef end_old_application(old_app, paid=False):\n    if paid:\n        if old_app.status == OldLoanStatus.WAITING.value:\n            old_app.status = OldLoanStatus.PAID.value\n            old_app.save()\n            return\n        if old_app.status == OldLoanStatus.PROCESSING.value:\n            old_app.status = OldLoanStatus.PAID.value\n            old_app.save()\n            return old_app.application_id\n    end_date = old_app.end_date\n    now = datetime.now()\n    if now >= max(end_date, old_app.promised_date or now):\n        old_app.status = OldLoanStatus.FINISHED.value\n        old_app.save()\n        return old_app.application_id\n\n\n<mask token>\n\n\ndef start_old_application(old_app, cancel=False):\n    application_id = old_app.application_id\n    if cancel and old_app.status == OldLoanStatus.PAID.value:\n        now = datetime.now()\n        if old_app.start_date is None:\n            old_app.status = OldLoanStatus.WAITING.value\n        elif now >= max(old_app.end_date, old_app.promised_date or now):\n            old_app.status = OldLoanStatus.FINISHED.value\n            DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.\n                promised_date or 
now)).where(DispatchAppHistory.bomber_id ==\n old_app.bomber_id, DispatchAppHistory.application ==\n application_id).execute()\n else:\n old_app.status = OldLoanStatus.PROCESSING.value\n DispatchAppHistory.update(out_at=None).where(DispatchAppHistory\n .bomber_id == old_app.bomber_id, DispatchAppHistory.\n application == application_id).execute()\n old_app.save()\n return\n application = Application.get_or_none(Application.id == application_id,\n Application.status != ApplicationStatus.REPAID.value, Application.\n overdue_days > 90, Application.promised_date.is_null(True) | (fn.\n DATE(Application.promised_date) < datetime.today().date()))\n if not application:\n logging.error('Can not set old application %s to start collecting',\n application_id)\n return\n if old_app.status in OldLoanStatus.no_available():\n logging.info('%s has finished or paid', old_app.application_id)\n return\n config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)\n sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)\n old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value\n old_app.status = OldLoanStatus.PROCESSING.value\n old_app.bomber_id = old_app_bomber\n old_app.start_date = datetime.now()\n if not old_app.end_date:\n old_app.end_date = datetime.now() + timedelta(days=sp)\n old_app.save()\n in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,\n application_ids=[old_app.application_id], expected_out_time=str(\n old_app.end_date))\n\n\n<mask token>\n\n\ndef run_member_sql(sql):\n result = [0, 0]\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n sql_result = cursor.fetchone()\n if sql_result:\n result = sql_result\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n return result\n\n\n<mask token>\n\n\ndef get_before_bomber(date_time):\n begin_time = str(date_time - timedelta(days=7))\n end_time = str(date_time)\n old_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at<date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (begin_time, begin_time))\n old_data = run_one_sql(old_sql)\n new_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at> '%s' \n and created_at<'%s'\n and overdue_days=1;\n \"\"\"\n % (begin_time, end_time))\n new_data = run_one_sql(new_sql)\n dpd4_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>date_add('%s',interval 1 day) \n and created_at< date_add('%s',interval 1 day)\n and overdue_days=4;\n \"\"\"\n % (begin_time, end_time))\n dpd4_data = run_one_sql(dpd4_sql)\n dpd2_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at< date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (end_time, end_time))\n dpd2_data = run_one_sql(dpd2_sql)\n all_money = old_data + new_data\n repayment = all_money - dpd4_data - dpd2_data\n pro = 0\n if all_money:\n pro = repayment / all_money * 100\n RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,\n proportion=pro, repayment=repayment)\n\n\n<mask token>\n\n\ndef get_c1a_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n 
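# The C1A into-rate compares the amount pending on the day a loan enters\n        # C1A against what is repaid while it sits in the cycle; repayment\n        # sums come back in micro-units, hence the division by 1000000 further\n        # down. Hedged example with assumed figures (not real data):\n        #     all_money == 2000000 and repayment == 500000\n        #     -> proportion == str(round(500000 / 2000000 * 100, 2)) == '25.0'\n        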
end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=19)\n repayment_sql = (\n \"\"\"\n select \n sum(b.principal_part+b.late_fee_part) as paid_amount, \n cdt\n from \n (select \n br.principal_part, br.late_fee_part, \n date(cdt) as cdt, br.repay_at, br.application_id\n from (\n select ba.id, ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n if not repayment:\n return\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n for d in repayment:\n repay = d[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n d[1], RepaymentReportInto.cycle == Cycle.C1A.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = round(repay, 3)\n pro = repay / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_c2_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n not_contain_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_money = run_one_sql(not_contain_sql)\n begin_date = date_time - timedelta(days=37)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n 
from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n not_contain_repay_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_repay = run_all_sql(not_contain_repay_sql)\n if not not_contain_money and repayment:\n return\n for i in ContainOut.values():\n if i == ContainOut.NOT_CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(\n not_contain_money, 3), proportion='0', repayment=0,\n is_first_loan=is_first_loan, contain_out=ContainOut.\n NOT_CONTAIN.value)\n for repay in not_contain_repay:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n elif i == ContainOut.CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(all_money, 3),\n proportion='0', repayment=0, is_first_loan=\n is_first_loan, contain_out=ContainOut.CONTAIN.value)\n for repay in repayment:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\ndef get_c3_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id, ba.c3_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan 
= %s\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=30)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c3_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c3_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = '%s'\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 4\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C3.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n if not repayment:\n return\n for repay in repayment:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n repay[1], RepaymentReportInto.cycle == Cycle.C3.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = repay_money\n pro = 0\n if report.all_money and int(report.all_money):\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_static_bomber(begin_date):\n active_date = begin_date - timedelta(days=8)\n bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),\n BomberR.last_active_at.alias('active')).where(BomberR.\n last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])\n summary = []\n for bomber in bombers:\n summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':\n bomber.role.cycle, 'work_ind': 0})\n SummaryBomber.insert_many(summary).execute()\n\n\n<mask token>\n\n\n@time_logger\ndef get_call_and_made(end_date, begin_date, real_time_query=False):\n call_sql = (\n \"\"\"\n select \n bomber_id, \n count(case when relationship is not null then application_id end) \n as 'call_cnt', \n count(distinct case when relationship is not null then \n application_id end) as 'call_case',\n count(case when phone_status=4 then application_id end) as 'connect',\n count(distinct case when phone_status=4 then application_id end) \n as 'connect_case'\n from (\n select bomber_id,application_id,phone_status, cycle, relationship\n from bomber.call_actions ba\n where created_at>'%s' and created_at<'%s'\n and type in (0, 1)\n ) a\n group by 1\n \"\"\"\n % (begin_date, end_date))\n calls = run_all_sql(call_sql)\n if real_time_query:\n return calls\n for call in calls:\n bomber, call_cnt, case_made, connect_cnt, case_connect = call\n SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,\n call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(\n SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date\n ).execute()\n return calls\n\n\n@time_logger\ndef get_claimed_cnt(end_date, begin_date, real_time_query=False):\n table_date = begin_date - timedelta(days=30)\n claimed_sql = (\n \"\"\"\n SELECT\n COUNT( `t1`.`application_id` ) AS cnt,\n `t1`.`bomber_id` AS bomber_id \n FROM\n 
`dispatch_app_history` AS t1 \n WHERE\n ( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null ) \n AND ( `t1`.`bomber_id` != 1000 ) \n AND ( `t1`.`partner_id` IS null ) \n AND ( `t1`.`entry_at` > '%s' ) \n AND ( `t1`.`entry_at` < '%s' ) \n GROUP BY\n `t1`.`bomber_id`\n \"\"\"\n % (begin_date, table_date, end_date))\n claimeds = run_all_sql(claimed_sql)\n if real_time_query:\n return claimeds\n for claimed in claimeds:\n cnt, bomber_id = claimed\n SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == bomber_id).execute()\n return claimeds\n\n\ndef get_sms_data(end_data, begin_data):\n all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(\n 'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')\n ).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.\n created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())\n ).group_by(ConnectHistoryR.operator)\n for sms in all_sms:\n SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==\n begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()\n return all_sms\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_NEW)\ndef summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_active_bomber(begin_date)\n get_call_and_made(end_date, begin_date)\n get_claimed_cnt(end_date, begin_date)\n get_sms_data(end_date, begin_date)\n get_ptp_data(end_date, begin_date)\n get_recover_amount(end_date, begin_date)\n get_unfollowed(begin_date)\n get_unfollowed_call(begin_date)\n\n\ndef get_new_case_amount(begin_date, end_date):\n all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.\n entry_late_fee_pending + DispatchAppHistoryR.\n entry_principal_pending).alias('pending'), DispatchAppHistoryR.\n bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')\n ).where(DispatchAppHistoryR.entry_at > begin_date, \n DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.\n partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)\n for case in all_case:\n SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt\n =case.cnt).where(SummaryBomber.bomber_id == case.bomber_id, \n SummaryBomber.time == begin_date).execute()\n return all_case\n\n\n<mask token>\n\n\ndef get_kp_today(begin_date, end_date):\n sql = (\n \"\"\"\n select bomber_id, count(distinct application_id)\n from( \n SELECT bomber_id, application_id\n FROM bomber.auto_call_actions a\n WHERE promised_date >= '%s' AND created_at < '%s' \n AND EXISTS(select 1 from bomber.application ba \n where a.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))\n UNION \n SELECT bomber_id, application_id\n FROM bomber.bombing_history b\n WHERE promised_date >= '%s' AND created_at < '%s'\n AND EXISTS(select 1 from bomber.application ba \n where b.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))) result\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, begin_date, begin_date, end_date, begin_date)\n )\n kp_today = run_all_sql(sql)\n for kp in kp_today:\n SummaryBomber.update(KP_today_cnt=kp[1]).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == kp[0]).execute()\n\n\n<mask token>\n\n\ndef get_new_case_cleared(begin_date, end_date):\n sql = (\n 
\"\"\"\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c2_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c2_entry > '%s' \n AND ba.c2_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c3_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c3_entry > '%s' \n AND ba.c3_entry < '%s' \n ) a\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date, end_date))\n case_cleared_sums = run_all_sql(sql)\n for clear in case_cleared_sums:\n SummaryBomber.update(new_case_cleared_sum=clear[1]).where(\n SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==\n clear[0]).execute()\n\n\n@time_logger\ndef get_new_case_call(begin_date, end_date, real_query_time=False):\n sql = (\n \"\"\"\n SELECT\n bd.bomber_id,\n count( DISTINCT bd.application_id )\n FROM\n bomber.dispatch_app_history bd\n INNER JOIN bomber.call_actions bc \n ON bd.application_id = bc.application_id \n AND bd.bomber_id = bc.bomber_id \n AND date( bd.entry_at ) = date( bc.created_at ) \n WHERE\n entry_at > '%s' \n AND entry_at < '%s' \n AND partner_id IS NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n new_case_calls = run_all_sql(sql)\n if real_query_time:\n return new_case_calls\n for call in new_case_calls:\n SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber\n .bomber_id == call[0], SummaryBomber.time == begin_date).execute()\n return new_case_calls\n\n\n@time_logger\ndef get_calltime_avg(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction,\n count( 1 ) AS auto_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' ' \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n AND an.recording is not null\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]][0] += data[1]\n result[data[0]][1] += data[2]\n 
continue\n result[data[0]] = [data[1], data[2]]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=\n value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == key).execute()\n return result\n\n\ndef get_no_calltime_avg(begin_date, end_date):\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND (an.status!='ANSWERED' or an.recording is null) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n for data in manuals:\n SummaryBomber.update(calltime_no_case_sum=data[1],\n calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /\n data[2] if data[2] else 0).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == data[0]).execute()\n\n\n@time_logger\ndef get_calltime_sum(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' '\n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == key).execute()\n return result\n\n\n<mask token>\n\n\n@action(MessageAction.UPDATE_SUMMARY_NEW)\ndef update_summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'UPDATE_SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_new_case_amount(begin_date, end_date)\n get_kp_cleared(begin_date, end_date)\n get_kp_today(begin_date, end_date)\n get_ptp_cnt(begin_date, end_date)\n get_ptp_call_cnt(begin_date, end_date)\n get_new_case_cleared(begin_date, end_date)\n get_new_case_call(begin_date, end_date)\n get_calltime_avg(begin_date, end_date)\n get_no_calltime_avg(begin_date, end_date)\n get_calltime_sum(begin_date, end_date)\n\n\ndef get_cycle_claimed(begin_date, end_date):\n sql = (\n \"\"\"\n select cycle,count(1)\n from bomber.application where cycle in (1,2,3,4)\n and (finished_at is null or (finished_at>'%s'))\n and created_at>'2018-09-01'\n group by 1\n \"\"\"\n % begin_date)\n result = 
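# get_calltime_avg and get_calltime_sum both fold the auto-call and manual-call
# rows into one dict keyed by bomber_id with an explicit "if key seen" branch.
# A sketch of the same accumulation with collections.defaultdict; behaviour is
# unchanged, rows are (bomber_id, talk_duration, call_count) tuples.
from collections import defaultdict

def merge_calltime_rows(autos, manuals):
    result = defaultdict(lambda: [0, 0])
    for bomber_id, duration, cnt in autos + manuals:
        result[bomber_id][0] += duration   # summed talk time
        result[bomber_id][1] += cnt        # summed answered calls
    return dict(result)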
run_all_sql(sql)\n return result\n\n\n@time_logger\ndef cycle_new_case(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( ba.id ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.created_at ) = bo.which_day_overdue \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c1b_entry ) = bo.which_day_overdue\n WHERE\n c1b_entry > '%s' \n AND c1b_entry < '%s' \n UNION\n SELECT\n 3 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c2_entry ) = bo.which_day_overdue \n WHERE\n c2_entry > '%s' \n AND c2_entry < '%s' \n UNION\n SELECT\n 4 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c3_entry ) = bo.which_day_overdue\n WHERE\n c3_entry > '%s' \n AND c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n all_datas = run_all_sql(sql)\n if real_time_query:\n return all_datas\n for data in all_datas:\n SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()\n return all_datas\n\n\n@time_logger\ndef get_cycle_new_case_call(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.created_at ) = date( bc.created_at ) \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c1b_entry ) = date( bc.created_at ) \n WHERE\n ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s'\n UNION\n SELECT\n 3 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c2_entry ) = date( bc.created_at ) \n WHERE\n ba.c2_entry > '%s' \n AND ba.c2_entry < '%s'\n UNION\n SELECT\n 4 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c3_entry ) = date( bc.created_at ) \n WHERE\n ba.c3_entry > '%s' \n AND ba.c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n cycle_datas = run_all_sql(sql)\n if real_time_query:\n return cycle_datas\n for data in cycle_datas:\n SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber\n .time == begin_date, SummaryBomber.cycle == data[0], \n SummaryBomber.bomber_id == data[0]).execute()\n return cycle_datas\n\n\ndef get_cycle_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n '1' AS cycle, count( DISTINCT id ), \n sum( 
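# The report SQL above splices dates in with the % operator. A hedged sketch of
# the C1A branch of the same query using DB-API parameter binding instead,
# assuming a pymysql/MySQLdb-style cursor; the module's run_all_sql path is
# left as-is.
CYCLE_NEW_CASE_CALL_SQL = """
    SELECT 1 AS cycle, COUNT(DISTINCT ba.id)
    FROM bomber.application ba
    INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
        AND DATE(ba.created_at) = DATE(bc.created_at)
    WHERE ba.created_at > %s AND ba.created_at < %s
"""

def cycle_new_case_call_param(cursor, begin_date, end_date):
    cursor.execute(CYCLE_NEW_CASE_CALL_SQL, (begin_date, end_date))
    return cursor.fetchall()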
paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.created_at ) = date( br.repay_at ) \n WHERE ba.created_at > '%s' \n AND ba.created_at < '%s' \n GROUP BY 1, 2 ) a \n UNION\n SELECT \n '2' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n GROUP BY 1, 2) a\n \"\"\"\n % (begin_date, end_date, begin_date, end_date))\n cycle_cleared = run_all_sql(sql)\n for i in cycle_cleared:\n SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber\n .cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.\n time == begin_date).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_NEW_CYCLE)\ndef summary_new_cycle(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW_CYCLE').first()\n if worker_log and worker_log.logs >= 5:\n return\n cycle_datas = SummaryBomber.select(fn.SUM(SummaryBomber.\n new_case_amount_sum).alias('new_case_amount_sum'), fn.SUM(\n SummaryBomber.new_case_cleared_sum).alias('new_case_cleared_sum'),\n fn.SUM(SummaryBomber.case_made_cnt).alias('case_made_cnt'), fn.SUM(\n SummaryBomber.case_connect_cnt).alias('case_connect_cnt'), fn.SUM(\n SummaryBomber.promised_cnt).alias('promised_cnt'), fn.SUM(\n SummaryBomber.promised_amount).alias('promised_amount'), fn.SUM(\n SummaryBomber.cleared_cnt).alias('cleared_cnt'), fn.SUM(\n SummaryBomber.cleared_amount).alias('cleared_amount'), fn.SUM(\n SummaryBomber.new_case_cnt).alias('new_case_cnt'), fn.SUM(\n SummaryBomber.new_case_call_cnt).alias('new_case_call_cnt'), fn.SUM\n (SummaryBomber.unfollowed_cnt).alias('unfollowed_cnt'), fn.SUM(\n SummaryBomber.unfollowed_call_cnt).alias('unfollowed_call_cnt'), fn\n .SUM(SummaryBomber.call_cnt).alias('call_cnt'), fn.SUM(\n SummaryBomber.sms_cnt).alias('sms_cnt'), fn.SUM(SummaryBomber.\n call_connect_cnt).alias('call_connect_cnt'), fn.SUM(SummaryBomber.\n ptp_today_cnt).alias('ptp_today_cnt'), fn.SUM(SummaryBomber.\n ptp_today_call_cnt).alias('ptp_today_call_cnt'), fn.SUM(\n SummaryBomber.ptp_next_cnt).alias('ptp_next_cnt'), fn.SUM(\n SummaryBomber.ptp_next_call_cnt).alias('ptp_next_call_cnt'), fn.SUM\n (SummaryBomber.KP_cleared_cnt).alias('KP_cleared_cnt'), fn.SUM(\n SummaryBomber.KP_today_cnt).alias('KP_today_cnt'), fn.SUM(\n SummaryBomber.work_ind).alias('work_ind'), fn.SUM(SummaryBomber.\n calltime_sum).alias('calltime_sum'), fn.SUM(SummaryBomber.\n calltime_case_sum).alias('calltime_case_sum'), fn.SUM(SummaryBomber\n .calltime_case_cnt).alias('calltime_case_cnt'), fn.SUM(\n SummaryBomber.calltime_no_case_sum).alias('calltime_no_case_sum'),\n fn.SUM(SummaryBomber.calltime_no_case_cnt).alias(\n 'calltime_no_case_cnt'), SummaryBomber.cycle.alias('cycle')).where(\n SummaryBomber.time == begin_date, SummaryBomber.cycle << Cycle.values()\n ).group_by(SummaryBomber.cycle)\n for cycle_data in cycle_datas:\n SummaryBomber.create(bomber_id=cycle_data.cycle, time=begin_date,\n cycle=cycle_data.cycle, 
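# run_all_sql is called throughout these reports but defined outside this
# section. Judging from the call sites it executes a raw statement and returns
# every row; an assumed, illustrative equivalent mirroring the
# readonly_db.get_cursor() usage in app_merge further below (the real helper
# may differ in which connection it uses):
def run_all_sql_sketch(db_handle, sql, params=None):
    cursor = db_handle.get_cursor()
    try:
        cursor.execute(sql, params or ())
        return cursor.fetchall()
    finally:
        cursor.close()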
new_case_amount_sum=cycle_data.\n new_case_amount_sum, new_case_cleared_sum=cycle_data.\n new_case_cleared_sum, new_case_cleard_rate=0, case_made_cnt=\n cycle_data.case_made_cnt, case_made_rate=0, case_connect_cnt=\n cycle_data.case_connect_cnt, case_connect_rate=0, promised_cnt=\n cycle_data.promised_cnt, promised_amount=cycle_data.\n promised_amount, cleared_cnt=cycle_data.cleared_cnt,\n cleared_amount=cycle_data.cleared_amount, new_case_cnt=\n cycle_data.new_case_cnt, new_case_call_cnt=cycle_data.\n new_case_call_cnt, unfollowed_cnt=cycle_data.unfollowed_cnt,\n unfollowed_call_cnt=cycle_data.unfollowed_call_cnt, call_cnt=\n cycle_data.call_cnt, sms_cnt=cycle_data.sms_cnt,\n call_connect_cnt=cycle_data.call_connect_cnt, calltime_case_avg\n =0, ptp_today_cnt=cycle_data.ptp_today_cnt, ptp_today_call_cnt=\n cycle_data.ptp_today_call_cnt, ptp_next_cnt=cycle_data.\n ptp_next_cnt, ptp_next_call_cnt=cycle_data.ptp_next_call_cnt,\n KP_cleared_cnt=cycle_data.KP_cleared_cnt, KP_today_cnt=\n cycle_data.KP_today_cnt, KP_cleared_rate=0, work_ind=cycle_data\n .work_ind, calltime_sum=cycle_data.calltime_sum,\n calltime_case_sum=cycle_data.calltime_case_sum,\n calltime_case_cnt=cycle_data.calltime_case_cnt,\n calltime_no_case_sum=cycle_data.calltime_no_case_sum,\n calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,\n work_time_sum=cycle_data.work_time_sum)\n cycle_claimed = get_cycle_claimed(begin_date, end_date)\n for claimed in cycle_claimed:\n SummaryBomber.update(claimed_cnt=claimed[1]).where(SummaryBomber.\n time == begin_date, SummaryBomber.cycle == claimed[0], \n SummaryBomber.bomber_id == claimed[0]).execute()\n cycle_new_case(begin_date, end_date)\n get_cycle_new_case_call(begin_date, end_date)\n get_cycle_new_case_cleared(begin_date, end_date)\n get_cycle_case_made_cnt(begin_date, end_date)\n all_datas = SummaryBomber.filter(SummaryBomber.time == begin_date)\n for data in all_datas:\n cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum if\n data.new_case_amount_sum else 0) * 100\n data.new_case_cleard_rate = cl_rat\n case_made_rate = (data.case_made_cnt / data.claimed_cnt if data.\n claimed_cnt else 0) * 100\n data.case_made_rate = case_made_rate\n case_connect_rate = (data.case_connect_cnt / data.case_made_cnt if\n data.case_made_cnt else 0) * 100\n data.case_connect_rate = case_connect_rate\n calltime_case_avg = (data.calltime_case_sum / data.\n calltime_case_cnt if data.calltime_case_cnt else 0)\n data.calltime_case_avg = calltime_case_avg\n calltime_no_case_avg = (data.calltime_no_case_sum / data.\n calltime_no_case_cnt if data.calltime_no_case_cnt else 0)\n data.calltime_no_case_avg = calltime_no_case_avg\n KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt if data.\n KP_today_cnt else 0) * 100\n data.KP_cleared_rate = KP_cleared_rate\n data.save()\n\n\n<mask token>\n\n\ndef get_change_bomber():\n cycle_role_map = {(5): Cycle.C1B.value, (6): Cycle.C2.value, (8): Cycle\n .C3.value}\n result = {}\n bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,\n BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=\n BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.\n created_at) == date.today(), BomberLog.role_id << list(\n cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.\n instalment == 0).dicts()\n for b_log in bomber_logs:\n cycle = cycle_role_map.get(b_log['role_id'])\n group_id = b_log['group_id']\n if cycle in result:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],\n 
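# summary_new_cycle's final loop repeats the same guarded-division pattern for
# every rate field. An illustrative helper (not part of the original module)
# that captures it:
def pct(numerator, denominator):
    # numerator/denominator as a percentage, 0 when the denominator is empty
    return (numerator / denominator if denominator else 0) * 100

# e.g. data.case_made_rate = pct(data.case_made_cnt, data.claimed_cnt)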
'new_ids': []}\n else:\n result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}}\n if b_log['operation'] == 0:\n result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])\n if result:\n bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map\n .keys())), Bomber.is_del == 0, Bomber.instalment == 0)\n for b in bombers:\n cycle_result = result.get(cycle_role_map[b.role_id], {})\n role_result = cycle_result.get(b.group_id)\n if not role_result:\n continue\n role_result['new_ids'].append(b.id)\n resutl_list = []\n for cycle, group_dict in result.items():\n resutl_list.extend(list(group_dict.values()))\n return resutl_list\n return []\n\n\n<mask token>\n\n\ndef get_surplus_application(new_ids, del_ids, average_nums, classified_apps):\n surplus_apps = []\n for del_id in del_ids:\n del_res = classified_apps.get(del_id, {})\n p_list = del_res.get('p_list', [])\n np_list = del_res.get('np_list', [])\n del_res['need_num'] = -(len(p_list) + len(np_list))\n del_res['to_list'] = np_list + p_list\n surplus_apps.extend(p_list)\n surplus_apps.extend(np_list)\n for index, bid in enumerate(new_ids):\n average = average_nums[index]\n bomber_app = classified_apps.get(bid)\n if not bomber_app:\n bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid\n ).first()\n bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':\n [], 'np_num': 0, 'need_num': average, 'partner_id': bomber.\n partner_id if bomber else ''}\n classified_apps[bid] = bomber_app\n else:\n p_num = len(bomber_app['p_list'])\n np_num = len(bomber_app['np_list'])\n if p_num > average:\n bomber_app['need_num'] = -np_num\n else:\n bomber_app['need_num'] = average - (p_num + np_num)\n bomber_app['p_num'] = p_num\n bomber_app['np_num'] = np_num\n if bomber_app['need_num'] < 0:\n random.shuffle(bomber_app['np_list'])\n res_over = bomber_app['np_list'][:-bomber_app['need_num']]\n bomber_app['to_list'] = res_over\n surplus_apps.extend(res_over)\n classified_apps_list = sorted(classified_apps.values(), key=lambda x: x\n ['need_num'], reverse=True)\n return surplus_apps, classified_apps_list\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)\ndef bomber_dispatch_applications(payload, msg_id):\n change_bombers = get_change_bomber()\n instalment_change_bombers = get_instalment_change_bomber()\n params = {ApplicationType.CASH_LOAN.value: change_bombers,\n ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}\n for type, bombers in params.items():\n change_bomber_dispatch_apps(change_bombers=bombers, type=type)\n\n\n@action(MessageAction.REPAIR_BOMBER)\ndef repair_bomber(payload, msg_id):\n app_mobile = payload['app_mobile']\n username = payload.get('user_name')\n logging.info('start repair bomber, number: %s' % app_mobile)\n if 'mobile_no' in payload and payload['mobile_no']:\n mobile = number_strip(str(payload['mobile_no']))[:64]\n name = payload.get('mobile_name')\n application = Application.filter(Application.user_mobile_no == mobile)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, mobile, username, name)\n if 'tel_no' in payload and payload['tel_no']:\n tel_no = number_strip(str(payload['tel_no']))[:64]\n name = payload.get('tel_name')\n application = Application.filter(Application.user_mobile_no == tel_no)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, tel_no, username, name)\n\n\n<mask token>\n\n\ndef get_summary_daily_time():\n mid_time_t1 = 
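# In get_surplus_application a negative need_num marks a bomber as holding
# -need_num cases too many; the overflow is skimmed off a shuffled np_list via
# the np_list[:-need_num] slice. That move in isolation:
import random

def pick_surplus(np_list, need_num):
    if need_num >= 0:
        return []
    random.shuffle(np_list)
    return np_list[:-need_num]  # need_num = -3 -> first 3 shuffled cases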
datetime.strptime('12:40:00', '%H:%M:%S')\n mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')\n now_date = datetime.now()\n now_date_time = now_date.time()\n today_str = str(now_date.date())\n if now_date_time < mid_time_t1.time():\n yes_date = now_date - timedelta(days=1)\n yes_date_str = str(yes_date.date())\n begin_str = yes_date_str + ' 17:20:00'\n end_str = today_str + ' 00:00:00'\n elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():\n begin_str = today_str + ' 00:00:00'\n end_str = today_str + ' 12:40:00'\n else:\n begin_str = today_str + ' 12:40:00'\n end_str = today_str + ' 17:20:00'\n begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')\n end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')\n summary_datetime = now_date - timedelta(minutes=30)\n summary_date = summary_datetime.date()\n return begin_time, end_time, summary_date\n\n\n@action(MessageAction.SUMMARY_DAILY)\ndef summary_daily_data(payload, msg_id):\n begin_time, end_time, summary_date = get_summary_daily_time()\n call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.\n bomber_id, CallActionsR.application_id, CallActionsR.promised_date,\n CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(\n CallActionsR.created_at >= begin_time, CallActionsR.created_at <\n end_time, CallActionsR.type << (0, 1))\n summary_dailys = {}\n for call in call_actions:\n if call.bomber_id not in summary_dailys:\n summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.\n bomber_id, 'summary_date': str(summary_date)}\n if call.name and call.number:\n summary_dailys[call.bomber_id]['call_cnt'] += 1\n if call.promised_date:\n summary_dailys[call.bomber_id]['ptp_cnt'] += 1\n C1_sql = (\n \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,a.cycle\n from \n (select a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,2,4,5)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\"\n % (begin_time, end_time))\n C1_repayment = run_all_sql(C1_sql)\n other_sql = (\n \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,cycle\n from (\n select application_id,current_bomber_id,pay_amount,repay_at,cycle\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at,\n br.cycle\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (3,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n group by 1,4) b\n group by 1\n \"\"\"\n % (begin_time, end_time))\n other_repayment = run_all_sql(other_sql)\n all_repayment = C1_repayment + other_repayment\n for res in all_repayment:\n bomber_id, pay_amount, cycle = res\n if bomber_id in summary_dailys:\n summary_dailys[bomber_id]['repayment'] += pay_amount\n else:\n summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': cycle, 'repayment': 
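# get_summary_daily_time buckets the clock into three reporting windows: before
# 12:40 the job summarises yesterday 17:20 -> today 00:00, between 12:40 and
# 17:20 it covers today 00:00 -> 12:40, and afterwards today 12:40 -> 17:20.
# A tiny standalone check of that bucketing:
from datetime import datetime, time as dt_time

def summary_window(now):
    if now.time() < dt_time(12, 40):
        return 'overnight'    # yesterday 17:20:00 -> today 00:00:00
    if now.time() < dt_time(17, 20):
        return 'morning'      # today 00:00:00 -> 12:40:00
    return 'afternoon'        # today 12:40:00 -> 17:20:00

assert summary_window(datetime(2020, 1, 1, 9, 0)) == 'overnight'
assert summary_window(datetime(2020, 1, 1, 13, 0)) == 'morning'
assert summary_window(datetime(2020, 1, 1, 18, 0)) == 'afternoon'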
pay_amount, 'bomber_id':\n bomber_id, 'summary_date': str(summary_date)}\n insert_values = list(summary_dailys.values())\n if insert_values:\n SummaryDaily.insert_many(insert_values).execute()\n\n\n<mask token>\n\n\ndef get_app_logs(apps):\n app_logs = {}\n all_np_apps = []\n all_p_apps = []\n for a in apps:\n latest_bomber = a['latest_bomber']\n latest_bomber = a['cycle'] if not latest_bomber else latest_bomber\n if latest_bomber in app_logs:\n app_logs[latest_bomber]['to_ids'].append(a['id'])\n else:\n app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':\n [a['id']], 'np_ids': [], 'p_ids': []}\n if a['promised_date'] and a['promised_date'].date() >= datetime.now(\n ).date():\n app_logs[latest_bomber]['p_ids'].append(a['id'])\n all_p_apps.append(a)\n else:\n app_logs[latest_bomber]['np_ids'].append(a['id'])\n all_np_apps.append(a)\n return app_logs, all_np_apps, all_p_apps\n\n\ndef month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):\n apps = list(apps)\n np_apps = list(np_apps)\n random.shuffle(np_apps)\n apps_len = len(apps)\n np_apps_len = len(np_apps)\n end = 0\n all_app_precentage = 0\n partners = Partner.select().where(Partner.cycle == cycle, Partner.\n status == PartnerStatus.NORMAL.value)\n for p in partners:\n all_app_precentage += p.app_percentage\n for partner in partners:\n bombers = Bomber.select().where(Bomber.partner == partner.id, \n Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.\n value)\n bids = {b.id: b for b in bombers}\n if len(bids) == 0:\n logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))\n continue\n start = end\n if np_apps_len >= int(apps_len * all_app_precentage):\n end = start + int(apps_len * partner.app_percentage)\n else:\n end = start + int(np_apps_len * partner.app_percentage /\n all_app_precentage)\n partner_app = np_apps[start:end]\n dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)\n np_apps = np_apps[end:]\n return np_apps\n\n\ndef month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):\n sys_cycle = {(1): 'AB_TEST_C1A', (2): 'AB_TEST_C1B', (3): 'AB_TEST_C2',\n (4): 'AB_TEST_C3'}\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0\n )\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n bombers = bombers.where(Bomber.instalment == 0)\n bids = {b.id: b for b in bombers}\n if cycle == Cycle.C1A.value:\n np_ids = [a['id'] for a in np_apps]\n np = Application.update(status=ApplicationStatus.PROCESSING.value,\n ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids\n ).execute()\n bomber_app_logs = app_logs.get(cycle, {})\n out_param = {'application_ids': bomber_app_logs.get('to_ids', []),\n 'month_dispatch': 1, 'src_bomber_id': cycle}\n new_out_record(**out_param)\n in_param = {'cycle': cycle, 'application_ids': np_ids,\n 'dest_bomber_id': cycle}\n new_in_record(**in_param)\n bomber_app_logs['need_num'] = len(np_apps)\n bomber_app_logs['form_ids'] = np_ids\n bomber_app_logs['status'] = 1\n else:\n dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)\n dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)\n\n\ndef dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,\n type=ApplicationType.CASH_LOAN.value):\n apps = list(apps)\n random.shuffle(apps)\n bids_list = list(bids.keys())\n if len(bids_list) <= 0:\n logging.info('get_dispatch_app_to_bomber no bids')\n return\n average_num = 
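# month_dispatch_app_out_partner hands each partner a contiguous slice of the
# shuffled np_apps, sized by its app_percentage; when there are fewer
# non-promised apps than the percentages call for, the shares are rescaled by
# the percentage total. The slicing arithmetic on its own:
def partner_slices(apps_len, np_apps_len, percentages):
    total = sum(percentages)
    end, slices = 0, []
    for p in percentages:
        start = end
        if np_apps_len >= int(apps_len * total):
            end = start + int(apps_len * p)
        else:
            end = start + int(np_apps_len * p / total)
        slices.append((start, end))
    return slices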
get_average_number(len(apps), len(bids_list))\n bomber_end = 0\n with db.atomic():\n for index, bid in enumerate(bids_list):\n current_bomber = bids.get(bid)\n bomber_app_logs = app_logs.get(bid, {})\n bomber_start = bomber_end\n bomber_end = bomber_start + average_num[index]\n bomber_apps = apps[bomber_start:bomber_end]\n from_p, from_np, from_ids, status = [], [], [], 0\n for ba in bomber_apps:\n promised_date = ba.get('promised_date')\n from_ids.append(ba['id'])\n if promised_date and promised_date.date() >= date.today():\n from_p.append(ba['id'])\n else:\n from_np.append(ba['id'])\n app_status = ApplicationStatus.AB_TEST.value\n if (cycle == Cycle.C1A.value and not out_partner and type ==\n ApplicationType.CASH_LOAN.value):\n app_status = ApplicationStatus.PROCESSING.value\n if from_p:\n p = Application.update(ptp_bomber=bid, latest_bomber=bid,\n status=app_status).where(Application.id << from_p).execute(\n )\n p_ids = bomber_app_logs.get('p_ids', []) + from_p\n bomber_app_logs['p_ids'] = p_ids\n if from_np:\n np = Application.update(latest_bomber=bid, ptp_bomber=None,\n status=ApplicationStatus.AB_TEST.value).where(\n Application.id << from_np).execute()\n np_ids = bomber_app_logs.get('np_ids', []) + from_np\n bomber_app_logs['np_ids'] = np_ids\n in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.\n partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}\n if type == ApplicationType.CASH_LOAN.value:\n out_param = {'src_bomber_id': bid, 'application_ids':\n bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}\n new_out_record(**out_param)\n new_in_record(**in_param)\n else:\n out_and_in_record_instalment(**in_param)\n bomber_app_logs['status'] = 1\n need_num = bomber_app_logs.get('need_num', 0) + average_num[index]\n bomber_app_logs['need_num'] = need_num\n all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids\n bomber_app_logs['form_ids'] = all_form_ids\n if not out_partner:\n continue\n try:\n DispatchApp.delete().where(DispatchApp.application.in_(\n from_ids)).execute()\n dispatch_ins = [{'application': id, 'partner':\n current_bomber.partner_id, 'bomber': bid, 'status':\n DisAppStatus.NORMAL.value} for id in from_ids]\n DispatchApp.insert_many(dispatch_ins).execute()\n except Exception as e:\n logging.info(\n 'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str\n (e), bid, from_ids))\n\n\n<mask token>\n\n\ndef calc_entry_time(overdue_days):\n app_entry_time = {}\n overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],\n 'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}\n for key, value in overdue_entry.items():\n if value[0] <= overdue_days <= value[1]:\n app_entry_time[key] = datetime.now()\n else:\n app_entry_time[key] = None\n return app_entry_time\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_BOMBER_OVERDUE)\ndef summary_bomber_overdue_everyday(payload, msg_id):\n cycle_list = Cycle.values()\n which_day = date.today()\n for cycle in cycle_list:\n apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,\n ApplicationR.ptp_bomber, ApplicationR.overdue_days,\n ApplicationR.promised_date, ApplicationR.follow_up_date,\n ApplicationR.external_id, OverdueBillR.status, OverdueBillR.\n periods, OverdueBillR.sub_bill_id).join(OverdueBillR,\n JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id\n ).where(ApplicationR.status != ApplicationStatus.REPAID.value, \n ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()\n bomber_overdue_list = []\n for app in apps:\n status = app.get('status')\n if 
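# get_average_number is defined outside this section; dispatch_apps_to_bomber
# uses it to split len(apps) cases over len(bids_list) bombers and then slices
# apps by the returned quotas. An assumed implementation consistent with that
# usage — an even split with the remainder spread over the first bombers:
def get_average_number_sketch(total, buckets):
    base, rest = divmod(total, buckets)
    return [base + (1 if i < rest else 0) for i in range(buckets)]

# get_average_number_sketch(10, 3) -> [4, 3, 3]; quotas always sum to total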
status == ApplicationStatus.REPAID.value:\n continue\n ptp_bomber = app.get('ptp_bomber')\n promised_date = app.get('promised_date')\n follow_up_date = app.get('follow_up_date')\n if not promised_date or promised_date.date() < date.today():\n ptp_bomber = promised_date = None\n if not follow_up_date or follow_up_date.date() < date.today():\n follow_up_date = None\n overdue_dict = {'collection_id': app.get('id'), 'external_id':\n app.get('external_id'), 'sub_bill_id': app.get(\n 'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app\n .get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':\n ptp_bomber, 'promised_date': promised_date,\n 'follow_up_date': follow_up_date, 'which_day': which_day,\n 'overdue_days': app.get('overdue_days')}\n bomber_overdue_list.append(overdue_dict)\n try:\n if bomber_overdue_list:\n with db.atomic():\n for index in range(0, len(bomber_overdue_list), 1000):\n insert_list = bomber_overdue_list[index:index + 1000]\n BomberOverdue.insert_many(insert_list).execute()\n except Exception as e:\n logging.info(\n 'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %\n (cycle, str(which_day), str(e)))\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)\ndef today_ptp_auto_call_switch(payload, msg_id):\n today = datetime.today().date()\n next_day = today + timedelta(days=1)\n apps = ApplicationR.select(ApplicationR.latest_bomber).where(\n ApplicationR.promised_date < next_day, ApplicationR.promised_date >=\n today, ApplicationR.promised_date.is_null(False), ApplicationR.\n status != ApplicationStatus.REPAID.value, ApplicationR.cycle <\n Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(\n ApplicationR.latest_bomber)\n bids = [a.latest_bomber_id for a in apps]\n if not bids:\n return\n q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(\n BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids\n ).execute()\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)\ndef update_today_switch_every_day(payload, msg_id):\n q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(\n BomberPtp.auto_ext.is_null(False)).execute()\n\n\n<mask token>\n",
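# summary_bomber_overdue_everyday writes its snapshot in 1000-row slices inside
# one transaction, keeping each INSERT statement bounded. The same pattern as a
# reusable sketch for any peewee model (db and the models come from this
# module's imports):
def chunked_insert(model, rows, size=1000):
    if not rows:
        return
    with db.atomic():
        for i in range(0, len(rows), size):
            model.insert_many(rows[i:i + size]).execute()

# chunked_insert(BomberOverdue, bomber_overdue_list)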
"step-4": "<mask token>\n\n\n@action(MessageAction.BOMBER_HEALTH_CHECK)\ndef health_check(payload, msg_id):\n pass\n\n\n<mask token>\n\n\ndef dpd1_process(lst):\n \"\"\"已废弃的方法\"\"\"\n if not lst:\n return\n for key, l in lst.items():\n rule = getattr(BeforeInBomber, key).value\n query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)\n ).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(\n rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.\n call_success()))\n success_set = {i.loanid for i in query}\n failed_list = list(set(l) - success_set)\n post_params = {'$and': rule.get('$and'), 'app_list': failed_list}\n resp = Hyperloop().post('/bomber/score/verify', json=post_params)\n if not resp.ok:\n logging.error('hyperloop score verification failed: %s, %s',\n str(resp.status_code), str(resp.text))\n logging.error('hyperloop score verification failed: %s', str(\n post_params))\n continue\n logging.debug('hyperloop score verification success: %s', resp.content)\n resp_json = resp.json()\n app_list = resp_json['data']\n if not app_list:\n continue\n for item in app_list:\n if random.randint(0, 5) == 1:\n send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':\n int(item)})\n\n\n@action(MessageAction.GET_IVR)\ndef get_ivr(payload, msg_id):\n logging.warning('start get_ivr')\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'DPD1-3_INTO_IVR').first()\n now = date.today()\n if sys_config and sys_config.value:\n start = now - timedelta(days=3)\n else:\n start = now\n end = now + timedelta(days=4)\n item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()\n if not item:\n AutoIVR.delete().execute()\n current_page = 0\n elif item.current_page >= item.total_page:\n return\n else:\n current_page = item.current_page\n auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,\n 'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,\n 'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':\n 10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,\n 'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,\n 'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,\n 'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,\n 'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,\n 'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,\n 'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,\n 'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,\n 'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,\n 'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,\n 'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':\n 101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,\n 'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,\n 'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,\n 'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}\n current_page += 1\n with db.atomic() as transaction:\n while True:\n bill_service = BillService()\n ivr_action = bill_service.ivr_pages(page=current_page,\n page_size=500, start_time=utc_datetime(str(start)),\n end_time=utc_datetime(str(end)))\n result = ivr_action['result']\n page_size = int(ivr_action.get('page_size', 0))\n total_page = int(ivr_action.get('total_page', 0))\n insert_args = []\n for a in result:\n due_at = no_utc_datetime(a['due_at'])\n days = (due_at.date() - now).days\n if days == 2:\n continue\n if days > 0:\n time = str(days)\n else:\n time = str(days).replace('-', 
'PDP')\n key = a['app_name'] + str(a['su']) + time\n group = auto_ivr.get(key)\n user_id = a['user_id']\n try:\n user_resp = AccountService().get_user(path_params={\n 'user_id': user_id})\n if str(user_resp['mobile_no']) == str(a['user_mobile_no']):\n numbers = a['user_mobile_no']\n else:\n numbers = a['user_mobile_no'] + ',' + user_resp.get(\n 'mobile_no')\n except:\n logging.error('Request Account Service Error.')\n numbers = a['user_mobile_no']\n insert_args.append({'application_id': a['id'], 'numbers':\n numbers, 'group': group, 'user_id': user_id})\n AutoIVR.insert_many(insert_args).execute()\n if current_page == 1:\n IVRActionLog.create(total_page=total_page, proc_date=now,\n page_size=page_size, current_page=current_page)\n item = IVRActionLog.get(IVRActionLog.proc_date == now)\n else:\n item.current_page = current_page\n item.page_size = page_size\n item.total_page = total_page\n item.save()\n transaction.commit()\n current_page += 1\n if current_page > int(total_page):\n break\n if sys_config and sys_config.value:\n try:\n classfiy_dpd_ptp_apps()\n except Exception as e:\n logging.error('dpd1-3_test_error:%s' % str(e))\n\n\ndef ivr_t2_test():\n t2_groups = [39, 40, 41, 42, 43, 44]\n ivr_test_proportion = 0.2\n sys_config = SystemConfig.select().where(SystemConfig.key ==\n 'IVR_TEST_PROPORTION').first()\n if sys_config and sys_config.value:\n ivr_test_proportion = float(sys_config.value)\n t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.\n status == AutoIVRStatus.AVAILABLE.value)\n t2_dict = defaultdict(list)\n for ivr in t2_ivrs:\n t2_dict[ivr.group].append(ivr.id)\n test_ivr_ids = []\n for group, ivr_ids in t2_dict.items():\n number = ceil(len(ivr_ids) * ivr_test_proportion)\n test_ivr_ids += ivr_ids[:number]\n if not test_ivr_ids:\n return\n q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.\n group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()\n\n\n<mask token>\n\n\n@action(MessageAction.APP_MERGE)\n@deprecated(version='1.0', reason='This function will be removed soon')\ndef app_merge(payload, msg_id):\n sql = \"\"\"\n select *\n from (\n select a.id as id\n from dashboard.application as a\n inner join repayment.bill2 as b on b.external_id = a.id\n where not exists (\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.app = 'DanaCepat'\n and a.is_first_loan = 1\n and a.apply_at < '2018-08-23 20:50:00'\n and b.overdue_days between 1 and 3\n and b.status != 2) result\n where not exists (\n select 1\n from bomber.application as a\n where a.cycle = 1\n and a.status = 4\n and a.id = result.id\n )\n \"\"\"\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n new_data = cursor.fetchall()\n cursor.close()\n if new_data:\n bomber = [103, 104]\n for d in new_data:\n app_id = {'id': d[0]}\n application_overdue(app_id, None)\n Application.update(status=ApplicationStatus.AB_TEST.value,\n latest_bomber=random.choice(bomber), ptp_bomber=None).where(\n Application.id == d[0]).execute()\n logging.warning('add new app success')\n ptp = date.today() - timedelta(days=1)\n del_sql = (\n \"\"\"\n select a.id\n from bomber.application as a\n where exists(\n select 1\n from battlefront.user_login_log as u\n where u.created_at > '2018-08-16'\n and u.user_id = a.user_id\n )\n and a.cycle = 1\n and a.status = 4\n and (a.promised_date is null or a.promised_date < \"%s\")\n \"\"\"\n % ptp)\n cursor = readonly_db.get_cursor()\n cursor.execute(del_sql)\n del_date = 
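# get_ivr picks the voice group for a bill through the key
# app_name + str(su) + time, where time is the days-to-due as a plain number
# for future dates and 'PDP<n>' once the bill is past due (bills due in exactly
# two days are skipped earlier in the loop). The key construction in isolation:
def ivr_group_key(app_name, su, days_to_due):
    if days_to_due > 0:
        time = str(days_to_due)
    else:
        time = str(days_to_due).replace('-', 'PDP')
    return app_name + str(su) + time

# ivr_group_key('DanaCepat', 0, 1)  -> 'DanaCepat01'   (group 1)
# ivr_group_key('KtaKilat', 1, -3)  -> 'KtaKilat1PDP3' (group 33)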
cursor.fetchall()\n cursor.close()\n if del_date:\n return\n ids = list()\n for d in del_date:\n ids.append(d[0])\n Application.update(status=ApplicationStatus.UNCLAIMED.value,\n latest_bomber=None).where(Application.id << ids).execute()\n\n\n@action(MessageAction.APPLICATION_BOMBER)\ndef application_overdue(payload, msg_id):\n application_id = payload['id']\n sub_bill_id = payload['bill_sub_id']\n local_app = Application.select().where(Application.external_id ==\n application_id).order_by(Application.finished_at).first()\n if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:\n logging.info('application %s overdue, already exists', application_id)\n add_contact(local_app)\n return\n if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:\n overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==\n sub_bill_id, OverdueBillR.external_id == application_id)\n if overdue_bill.exists():\n logging.info(\n 'application %s,sub_bill_id %s overdue, already exists' % (\n application_id, sub_bill_id))\n return\n try:\n sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])\n sub_bill = sub_bill[0]\n except Exception:\n logging.error(\n 'application %s overdue, get sub_bill info failed:Request To repayment Error'\n , application_id)\n return\n if sub_bill['status'] == 2:\n logging.error('application %s overdue, but bills already cleared',\n application_id)\n return\n overdue_days = sub_bill.get('overdue_days', 0)\n if overdue_days == 0:\n logging.info('application {} no overdue'.format(str(application_id)))\n return\n gold_eye = GoldenEye().get('/applications/%s' % application_id)\n if not gold_eye.ok:\n logging.error('get application %s failed: Request to GoldenEye.',\n application_id)\n return\n gold_app = gold_eye.json().get('data')\n user_id = gold_app['user_id']\n apply_history = Dashboard().get('/users/%s/apply-history' % user_id)\n if not apply_history.ok:\n logging.error(\n 'get user %s apply history failed: Request to Dashboard Failed.',\n user_id)\n return\n history = apply_history.json().get('data')\n loan_success_times = len([(1) for i in history if i['status'] in [80, \n 90, 100, 70] and i['id'] != gold_app['id']])\n id = application_id\n type = ApplicationType.CASH_LOAN.value\n bill_id = sub_bill.get('bill_id')\n amount = sub_bill.get('amount')\n amount_net = sub_bill.get('amount_net')\n interest_rate = sub_bill.get('interest_rate')\n overdue_days = sub_bill.get('overdue_days')\n origin_due_at = sub_bill.get('origin_due_at')\n sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,\n 'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),\n 'overdue_days': overdue_days, 'origin_due_at': origin_due_at,\n 'amount': amount, 'amount_net': amount_net, 'interest_rate':\n interest_rate, 'external_id': application_id}\n if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:\n if local_app and local_app.status != ApplicationStatus.REPAID.value:\n sub_overdue_bill['collection_id'] = local_app.id\n local_app.amount += amount\n local_app.amount_net += amount_net\n local_app.save()\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('application %s,sub_bill_id:%s overdue created' %\n (application_id, sub_bill_id))\n return\n else:\n id = idg()\n type = ApplicationType.CASH_LOAN_STAGING.value\n sub_overdue_bill['collection_id'] = id\n ptp_info = BombingHistory.filter(BombingHistory.application == id).first()\n promised_amount = ptp_info and ptp_info.promised_amount\n promised_date = ptp_info 
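# application_overdue counts prior successful loans with
# len([(1) for i in history if ...]); the same count reads a little clearer as
# a sum over a generator. Statuses 80/90/100/70 are the terminal-success codes
# used by the original comprehension.
SUCCESS_STATUSES = {70, 80, 90, 100}

def loan_success_times(history, current_app_id):
    return sum(1 for i in history
               if i['status'] in SUCCESS_STATUSES and i['id'] != current_app_id)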
and ptp_info.promised_date\n application = Application.create(id=id, user_id=gold_app['user_id'],\n user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[\n 'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],\n contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(\n 'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=\n birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(\n gold_app.get('id_ektp')), profile_province=(gold_app.get(\n 'profile_province') or {}).get('name'), profile_city=(gold_app.get(\n 'profile_city') or {}).get('name'), profile_district=(gold_app.get(\n 'profile_district') or {}).get('name'), profile_residence_time=\n gold_app.get('profile_residence_time'), profile_residence_type=\n gold_app.get('profile_residence_type'), profile_address=gold_app.\n get('profile_address'), profile_education=gold_app.get(\n 'profile_education'), profile_college=(gold_app.get(\n 'profile_college') or {}).get('name'), job_name=gold_app.get(\n 'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get\n ('job_bpjs'), job_user_email=gold_app.get('job_user_email'),\n job_type=gold_app.get('job_type'), job_industry=gold_app.get(\n 'job_industry'), job_department=gold_app.get('job_department'),\n job_province=(gold_app.get('job_province') or {}).get('name'),\n job_city=(gold_app.get('job_city') or {}).get('name'), job_district\n =(gold_app.get('job_district') or {}).get('name'), job_address=\n gold_app.get('job_address'), amount=amount, amount_net=amount_net,\n interest_rate=interest_rate, term=gold_app.get('term'),\n origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=\n sub_bill.get('repay_at'), loan_success_times=loan_success_times,\n arrived_at=datetime.now(), follow_up_date=datetime.now(),\n promised_amount=promised_amount, promised_date=promised_date,\n external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=\n datetime.now())\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info('overdue application %s created', application_id)\n Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,\n status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)\n add_contact(application)\n\n\ndef add_contact(application):\n logging.info('start add contact for application: %s', application.id)\n contacts = Contact.filter(Contact.user_id == application.user_id)\n existing_numbers = {contact.number for contact in contacts}\n insert_contacts = list()\n mon_insert_contact = {}\n user_mobile_no = number_strip(application.user_mobile_no)\n if user_mobile_no and user_mobile_no not in existing_numbers:\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': user_mobile_no, 'relationship':\n Relationship.APPLICANT.value, 'source': 'apply info',\n 'real_relationship': Relationship.APPLICANT.value})\n existing_numbers.add(number_strip(application.user_mobile_no))\n extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id\n )\n if not extra_phone.ok:\n extra_phone = []\n logging.error('get user %s extra contacts failed', application.user_id)\n else:\n extra_phone = extra_phone.json()['data']\n if extra_phone:\n for i in extra_phone:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n application.user_name, 'number': number, 'relationship':\n Relationship.APPLICANT.value, 'source': 'extra phone',\n 
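# add_contact normalises every candidate phone through number_strip() (defined
# elsewhere in this module), truncates to 64 characters and tracks an
# existing_numbers set so each number is inserted once. A sketch of that dedup
# step; row_factory is a hypothetical callable that builds the Contact dict
# for a fresh number.
def add_unique_number(raw_number, existing_numbers, rows, row_factory):
    number = number_strip(raw_number)[:64] if raw_number else ''
    if not number or number in existing_numbers:
        return False
    rows.append(row_factory(number))
    existing_numbers.add(number)
    return True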
'real_relationship': Relationship.APPLICANT.value})\n key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n ec_contact = []\n contact = json.loads(application.contact or '[]')\n for i in contact:\n if number_strip(i['mobile_no']\n ) not in existing_numbers and number_strip(i['mobile_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['mobile_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['mobile_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['mobile_no']))\n if i['type'] != 1:\n continue\n if number_strip(i['tel_no']) not in existing_numbers and number_strip(i\n ['tel_no']):\n ec_contact.append({'user_id': application.user_id, 'name': i[\n 'name'], 'number': number_strip(i['tel_no']),\n 'relationship': Relationship.FAMILY.value, 'sub_relation':\n SubRelation.EC.value, 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number_strip(i['tel_no']\n ), ContactType.F_EC.value\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['tel_no']))\n if ec_contact:\n Contact.insert_many(ec_contact).execute()\n if all((application.job_tel, number_strip(application.job_tel), \n number_strip(application.job_tel) not in existing_numbers)):\n insert_contacts.append({'user_id': application.user_id, 'name':\n None, 'number': number_strip(application.job_tel),\n 'relationship': Relationship.COMPANY.value, 'source':\n 'basic info job_tel', 'real_relationship': Relationship.COMPANY\n .value})\n key = user_mobile_no, number_strip(application.job_tel\n ), ContactType.C_BASIC_INFO_JOB_TEL.value\n mon_insert_contact[key] = 1, 0, None\n existing_numbers.add(number_strip(application.job_tel))\n sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %\n application.external_id)\n if not sms_contacts.ok:\n sms_contacts = []\n logging.info('get user %s sms contacts failed', application.external_id\n )\n else:\n sms_contacts = sms_contacts.json()['data']\n if sms_contacts:\n for i in sms_contacts:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.SUGGESTED.value, 'source': 'sms contacts',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value\n mon_insert_contact[key] = 1, 0, i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n cf = GoldenEye().get('/applications/%s/call/frequency' % application.\n external_id)\n if not cf.ok:\n call_frequency = []\n logging.error('get application %s call frequency error',\n application.external_id)\n else:\n call_frequency = cf.json()['data']\n insert_contacts = []\n fm = GoldenEye().get('/applications/%s/contact/family-member' %\n application.external_id)\n if not fm.ok:\n family = []\n logging.error('get application %s family-member info error',\n application.external_id)\n else:\n family = fm.json()['data']\n if family:\n for i in family:\n if not i.get('number'):\n 
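# Alongside the Contact rows, add_contact fills mon_insert_contact, a dict
# keyed by (user_mobile_no, number, contact_type) with
# (total_count, total_duration, name) values; it is shipped at the end of the
# function to the IMPORT_CONTACT_TO_MON queue. The bookkeeping in isolation:
def record_mon_contact(mon, user_mobile_no, number, contact_type,
                       name=None, count=1, duration=0):
    mon[(user_mobile_no, number, contact_type)] = (count, duration, name)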
logging.info('family member %s' % str(i))\n continue\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n logging.info('family members: %s' % str(i))\n insert_contacts.append({'user_id': application.user_id, 'name':\n i['name'][:128], 'number': number, 'relationship':\n Relationship.FAMILY.value, 'source': FamilyContactType.\n CALLEC.value, 'total_count': i.get('total_count', 1),\n 'total_duration': i.get('total_duration', 0),\n 'real_relationship': Relationship.FAMILY.value})\n key = user_mobile_no, number, ContactType.F_CALL_EC.value\n mon_insert_contact[key] = i.get('total_count', 1), i.get(\n 'total_duration', 0), i['name'][:128]\n existing_numbers.add(number)\n mon_update_contact = {}\n if call_frequency:\n with db.atomic():\n count = 1\n for i in call_frequency:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n Contact.update(total_count=i['total_count'],\n total_duration=i['total_duration']).where(Contact.\n number == number, Contact.user_id == application.\n user_id)\n key = user_mobile_no, number\n mon_update_contact[key] = i['total_count'], i[\n 'total_duration']\n continue\n if count < 6:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.FAMILY.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': FamilyContactType.\n CALLTOP5.value, 'real_relationship': Relationship.\n FAMILY.value})\n count += 1\n key = user_mobile_no, number, ContactType.F_CALL_TOP5.value\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n else:\n insert_contacts.append({'user_id': application.user_id,\n 'name': i['name'][:128], 'number': number,\n 'relationship': Relationship.SUGGESTED.value,\n 'total_count': i['total_count'], 'total_duration':\n i['total_duration'], 'source': 'call frequency',\n 'real_relationship': Relationship.SUGGESTED.value})\n key = (user_mobile_no, number, ContactType.\n S_CALL_FREQUENCY.value)\n mon_insert_contact[key] = i['total_count'], i[\n 'total_duration'], i['name'][:128]\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n next_apply_list = AccountService().add_contact(application.user_id)\n for next_apply in next_apply_list:\n number = number_strip(str(next_apply))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n SUGGESTED.value, source='online profile phone',\n real_relationship=Relationship.SUGGESTED.value)\n key = (user_mobile_no, number, ContactType.\n S_ONLINE_PROFILE_PHONE.value)\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %\n application.user_id)\n if not next_applicant.ok:\n next_applicant = []\n logging.error('get user %s dual_contact contacts failed' %\n application.user_id)\n else:\n next_applicant = next_applicant.json()['data']\n if next_applicant:\n for i in next_applicant:\n number = number_strip(str(i))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=\n application.user_name, number=number, relationship=\n Relationship.APPLICANT.value, source='apply info',\n real_relationship=Relationship.APPLICANT.value)\n key = user_mobile_no, number, 
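# In the call_frequency loop, numbers already on file only get their counters
# refreshed; of the new ones, the first five become FAMILY/CALLTOP5 contacts
# and the rest are stored as SUGGESTED. The ranking split on already-filtered
# new rows, separated from the ORM writes:
def split_call_frequency(new_rows, top_n=5):
    top, rest = [], []
    for idx, row in enumerate(new_rows):
        (top if idx < top_n else rest).append(row)
    return top, rest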
ContactType.A_APPLY_INFO.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n numbers = []\n try:\n numbers = AccountService().ktp_number(path_params={'user_id':\n application.user_id})\n except Exception as e:\n logging.info('request ktp numbers failed %s' % str(e))\n for n in numbers:\n number = number_strip(str(n))[:64]\n if number and number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=application.\n user_name, number=number, relationship=Relationship.\n APPLICANT.value, source='ktp number', real_relationship=\n Relationship.APPLICANT.value)\n key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n try:\n ecs = GoldenEye().get('/applications/%s/contact/ec' % application.\n external_id)\n except Exception as e:\n logging.info('request ec-member error: %s' % str(e))\n try:\n if not ecs.ok:\n ec = []\n logging.info('get application %s ec-member info error',\n application.external_id)\n else:\n ec = ecs.json()['data']\n if ec:\n for e in ec:\n number = number_strip(e['numbers'])[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=e[\n 'name'][:128], number=number, relationship=\n Relationship.FAMILY.value, source=FamilyContactType\n .CONTACTEC.value, real_relationship=Relationship.\n FAMILY.value)\n key = (user_mobile_no, number, ContactType.F_CONTACT_EC\n .value)\n mon_insert_contact[key] = 1, 0, e['name'][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add ec_member error:%s' % str(e))\n try:\n mn = GoldenEye().get('/applications/%s/contact/my_number' %\n application.external_id)\n except Exception as e:\n logging.info('request my_number error: %s' % str(e))\n try:\n if not mn.ok:\n my = []\n logging.info('get application %s my_number info error',\n application.external_id)\n else:\n my = mn.json()['data']\n if my:\n for m in my:\n number = number_strip(m)[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=my[m][\n :128], number=number, relationship=Relationship.\n SUGGESTED.value, source='my number',\n real_relationship=Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_MY_NUMBER.value\n mon_insert_contact[key] = 1, 0, my[m][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add my_member error:%s' % str(e))\n try:\n cn = GoldenEye().get('/applications/%s/contact/company-number' %\n application.external_id)\n except Exception as e:\n logging.info('request company-number error: %s' % str(e))\n try:\n if not cn.ok:\n cn = []\n logging.info('get application %s company_number info error',\n application.external_id)\n else:\n cn = cn.json()['data']\n if cn:\n for c in cn:\n number = c\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=cn[c][\n :128], number=number, relationship=Relationship.\n COMPANY.value, source='company', real_relationship=\n Relationship.COMPANY.value)\n key = user_mobile_no, number, ContactType.C_COMPANY.value\n mon_insert_contact[key] = 1, 0, cn[c][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add company_member error:%s' % 
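# add_contact repeats one fetch shape many times: call GoldenEye().get(...),
# log and fall back to an empty payload on failure, otherwise unwrap
# resp.json()['data']. An illustrative shared helper (the original keeps each
# check inline):
def fetch_data(path):
    try:
        resp = GoldenEye().get(path)
        if resp.ok:
            return resp.json()['data']
        logging.info('request %s failed: status %s', path, resp.status_code)
    except Exception as e:
        logging.info('request %s error: %s', path, str(e))
    return []

# family = fetch_data('/applications/%s/contact/family-member' % external_id)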
str(e))\n try:\n ol = AccountService().other_login_contact(userId=application.user_id)\n except Exception as e:\n logging.error('request other_login error: %s' % e)\n ol = {}\n try:\n for o in ol:\n number = number_strip(o)\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(user_id=application.user_id, name=ol[o][:128\n ], number=number, relationship=Relationship.SUGGESTED.\n value, source='other_login', real_relationship=\n Relationship.SUGGESTED.value)\n key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value\n mon_insert_contact[key] = 1, 0, ol[o][:128]\n except Exception as e:\n logging.error('add other_login number error:%s' % e)\n logging.info('add contact for application %s finished', application.id)\n if mon_insert_contact or mon_update_contact:\n send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {\n 'user_mobile_no': user_mobile_no, 'insert_contact': str(\n mon_insert_contact), 'update_contact': str(mon_update_contact),\n 'user_id': application.user_id, 'name': application.user_name})\n\n\n<mask token>\n\n\ndef get_contact_from_mongo(number):\n if not number:\n return []\n query = TotalContact.objects(src_number=number, source__in=TotalContact\n .available()).order_by('source')\n lst = []\n for c in query:\n relation = TotalContact.relationship(c.source)\n if relation == -1:\n continue\n source = TotalContact.str_source(c.source)\n if not source:\n continue\n lst.append({'related_number': c.dest_number, 'source': source,\n 'is_calc': c.is_calc, 'total_count': c.total_count,\n 'total_duration': c.total_duration, 'relation': relation,\n 'name': c.dest_name})\n return lst\n\n\n<mask token>\n\n\ndef check_key_not_none(payload, keys):\n for key in keys:\n if payload.get(key) is None:\n logging.error('Missing args {}'.format(str(key)))\n return False\n return True\n\n\n@action(MessageAction.BILL_PAID)\ndef bill_paid(payload, msg_id):\n validate = check_key_not_none(payload, ['external_id', 'late_fee_part',\n 'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])\n if not validate:\n logging.error('payload key not fully pass in.')\n return\n external_id = payload['external_id']\n late_fee_part = Decimal(payload['late_fee_part'])\n principal_part = Decimal(payload['principal_part'])\n paid_at = payload['paid_at']\n partner_bill_id = payload['partner_bill_id']\n logging.debug(\n 'application %s paid principal part %s, paid late fee part %s',\n external_id, principal_part, late_fee_part)\n application = Application.filter(Application.external_id == external_id\n ).order_by(-Application.created_at).first()\n if not application:\n logging.info('application %s paid, not found application', external_id)\n return\n sub_bill_id = payload['bill_sub_id']\n overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==\n application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()\n if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not\n overdue_bill):\n logging.info('bill sub not in bomber %s', sub_bill_id)\n return\n with db.atomic():\n repay_at = str_no_utc_datetime(payload['latest_repay_at'])\n Application.update(repay_at=repay_at).where(Application.id ==\n application.id).execute()\n RepaymentLog.create(application=application.id, is_bombed=True,\n current_bomber=application.latest_bomber_id, cycle=application.\n cycle, principal_part=principal_part, late_fee_part=\n late_fee_part, repay_at=paid_at, ptp_bomber=application.\n ptp_bomber, latest_call=application.latest_call, periods=\n overdue_bill.periods if 
overdue_bill else None, overdue_bill_id\n =overdue_bill.id if overdue_bill else None, partner_bill_id=\n partner_bill_id)\n phone_status = PhoneStatus.CONNECTED.value\n real_relationship = RealRelationship.user_values()\n commit = CallActionCommit.NO.value\n number = CallActions.select(CallActions.number).where(CallActions.\n phone_status == phone_status, CallActions.real_relationship <<\n real_relationship, CallActions.commit == commit, CallActions.\n application == application.id).order_by(-CallActions.created_at\n ).first()\n if number:\n Contact.update(call_priority=PriorityStatus.REPAY.value).where(\n Contact.user_id == application.user_id, Contact.\n call_priority == PriorityStatus.LAST.value).execute()\n Contact.update(call_priority=PriorityStatus.LAST.value).where(\n Contact.user_id == application.user_id, Contact.number ==\n number.number).execute()\n if not application.latest_bomber_id:\n return\n Inbox.create(title='application %s,sub_bill_id %s repaid' % (\n application.external_id, sub_bill_id), content=\n 'application %s,sub_bill_id %s repaid' % (application.\n external_id, sub_bill_id), receiver=application.\n latest_bomber_id or application.last_bomber_id, category=\n InboxCategory.REPAID.value)\n\n\n@action(MessageAction.BILL_RELIEF)\ndef bill_relief(payload, msg_id):\n \"\"\"Deprecated.\"\"\"\n bill = payload['head_bill']\n repay_at = str_no_utc_datetime(bill['latest_repay_at'])\n updated_row = Application.update(repay_at=repay_at).where(Application.\n id == bill['external_id']).execute()\n logging.info('application %s bill relief done', bill['external_id'])\n return updated_row\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)\ndef calc_overdue_days_over(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query = Application.update(overdue_days=overdue_days).where(Application\n .status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.\n overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)\n updated_rows_count = query.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count)\n try:\n calc_overdue_days_over_instalment()\n except Exception as e:\n logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))\n apps = Application.filter(Application.status << [ApplicationStatus.\n UNCLAIMED.value, ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value], Application.overdue_days > 95, \n Application.promised_date.is_null(True) | (fn.DATE(Application.\n promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {\n 'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n\n\ndef calc_overdue_days_over_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n for status in sub_bill_status_list:\n query = OverdueBill.update(overdue_days=overdue_days).where(\n OverdueBill.status == status, OverdueBill.overdue_days > 95)\n 
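# run the bulk per-status update first, then read the sub-bills back to roll the max overdue_days up to each staging application\n 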
updated_rows_count = query.execute()\n logging.info(\n 'calc_overdue_days_over_instalment done,count:%s,status:%s' % (\n updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.collection_id,\n OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on\n =OverdueBill.collection_id == Application.id).where(Application\n .status == status, Application.type == ApplicationType.\n CASH_LOAN_STAGING.value)\n app_update = {}\n for ob in overdue_bills:\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)\ndef calc_overdue_days(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query_unclaimed = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.UNCLAIMED.value, \n Application.overdue_days <= 95, Application.type == ApplicationType\n .CASH_LOAN.value)\n updated_rows_count_unclaimed = query_unclaimed.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_unclaimed)\n query_processing = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.PROCESSING.value, \n Application.overdue_days <= 95, Application.type == ApplicationType\n .CASH_LOAN.value)\n updated_rows_count_processing = query_processing.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_processing)\n query_test = Application.update(overdue_days=overdue_days).where(\n Application.status == ApplicationStatus.AB_TEST.value, Application.\n overdue_days <= 95, Application.type == ApplicationType.CASH_LOAN.value\n )\n updated_rows_count_test = query_test.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_test)\n calc_overdue_days_instalment()\n apps = Application.select(Application.id).where(Application.status << [\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value, ApplicationStatus.AB_TEST.value], Application.overdue_days <=\n 95, Application.promised_date.is_null(True) | (fn.DATE(Application.\n promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {\n 'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n Application.update(C1A_entry=datetime.now()).where(Application.status <<\n [ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value, ApplicationStatus.AB_TEST.value], Application.overdue_days == 4\n ).execute()\n\n\ndef calc_overdue_days_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]\n today_now_time = datetime.now()\n month_first_day = today_now_time.replace(day=1, hour=1, minute=30,\n second=0, 
microsecond=0)\n for status in sub_bill_status_list:\n query = OverdueBill.update(overdue_days=overdue_days).where(\n OverdueBill.status == status, OverdueBill.overdue_days <= 95)\n updated_rows_count = query.execute()\n logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %\n (updated_rows_count, status))\n overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.\n created_at, OverdueBill.collection_id, OverdueBill.overdue_days\n ).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.\n collection_id == Application.id).where(Application.status ==\n status, Application.type == ApplicationType.CASH_LOAN_STAGING.value\n )\n app_update = {}\n for ob in overdue_bills:\n if (ob.status == ApplicationStatus.REPAID.value and ob.\n created_at < month_first_day):\n continue\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n for aid, a_days in app_update.items():\n q = Application.update(overdue_days=a_days).where(Application.\n id == aid).execute()\n logging.info('update instalment application done')\n\n\n@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)\ndef automatic_escalation(payload, msg_id):\n app_ids = payload.get('application_list', [])\n if not app_ids:\n return\n apps = Application.select().where(Application.id.in_(app_ids), \n Application.status != ApplicationStatus.REPAID.value)\n for a in apps:\n new_cycle = application_entry_different_calculations(a)\n if a.overdue_days < 90:\n logging.info(\n 'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'\n .format(a.id, new_cycle, a.cycle, a.overdue_days))\n if new_cycle > a.cycle:\n with db.atomic():\n if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle\n .C1B.value):\n bomber_id = (a.latest_bomber_id if a.latest_bomber_id else\n a.cycle)\n DispatchAppHistory.update(out_at=datetime.now(),\n out_overdue_days=a.overdue_days).where(\n DispatchAppHistory.application == a.id, \n DispatchAppHistory.bomber_id == bomber_id).execute()\n Escalation.create(application=a.id, type=EscalationType.\n AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,\n current_cycle=a.cycle, escalate_to=new_cycle,\n current_bomber_id=a.latest_bomber)\n dis_app_update = DispatchApp.update(status=DisAppStatus.\n ABNORMAL.value).where(DispatchApp.application == a.id)\n dis_app_update.execute()\n a.cycle = new_cycle\n a.last_bomber = a.latest_bomber\n a.status = ApplicationStatus.UNCLAIMED.value\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n if new_cycle == Cycle.C1B.value:\n a.C1B_entry = datetime.now()\n elif new_cycle == Cycle.C2.value:\n a.C2_entry = datetime.now()\n elif new_cycle == Cycle.C3.value:\n a.C3_entry = datetime.now()\n a.save()\n logging.info('automatic escalation done')\n\n\ndef application_entry_different_calculations(app):\n conf = {1: [1, 10], 2: [11, 30], 3: [31, 60], 4: [61, 90], 5:\n [91, 999999]}\n for new_cycle, scopes in conf.items():\n if scopes[0] <= app.overdue_days <= scopes[1]:\n return new_cycle\n return app.cycle\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY)\ndef cron_summary(payload, msg_id):\n \"\"\"Deprecated.\"\"\"\n employees = Bomber.select(Bomber, Role).join(Role)\n summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,\n 'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,\n 'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':\n 0, 
'sms_sent': 0} for i in employees}\n now_date = date.today()\n cal_date = now_date - timedelta(days=1)\n claimed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('claimed')).where(fn.DATE(Application.\n claimed_at) == cal_date, Application.status << [ApplicationStatus.\n PROCESSING.value, ApplicationStatus.REPAID.value], Application.\n latest_bomber.is_null(False)).group_by(Application.latest_bomber)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False)).group_by(\n Application.latest_bomber)\n completed = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('completed')).where(Application.\n latest_bombing_time.is_null(False), fn.DATE(Application.\n latest_bombing_time) == cal_date, Application.latest_bomber.is_null\n (False)).group_by(Application.latest_bomber)\n escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(\n Escalation.id).alias('escalated')).where(fn.DATE(Escalation.\n created_at) == cal_date, Escalation.type == EscalationType.\n AUTOMATIC.value, Escalation.current_bomber.is_null(False), \n Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation\n .current_bomber)\n transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).\n alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==\n cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(\n Transfer.operator)\n promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(\n BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.\n created_at) == cal_date, BombingHistory.result == BombingResult.\n HAS_PROGRESS.value).group_by(BombingHistory.bomber)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,\n RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.\n current_bomber)\n calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id\n ).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==\n cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)\n calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.\n record_id).alias('calls_connected')).where(fn.DATE(CallLog.\n time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==\n '1').group_by(CallLog.user_id)\n sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(\n ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_\n (ConnectType.sms()), ConnectHistory.created_at >= cal_date, \n ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)\n for i in claimed:\n summary[i.latest_bomber_id]['claimed'] += i.claimed\n for i in completed:\n summary[i.latest_bomber_id]['completed'] += i.completed\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n for i in escalated:\n summary[i.current_bomber_id]['escalated'] += i.escalated\n for i in transferred:\n summary[i.operator_id]['transferred'] += i.transferred\n for i in promised:\n summary[i.bomber_id]['promised'] += i.promised\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n 
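# note: rebinding amount_recovered inside the loop shadows the query object above; safe here because the for statement already obtained its iterator\n 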
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in calls_made:\n summary[int(i.user_id)]['calls_made'] += i.calls_made\n for i in calls_connected:\n summary[int(i.user_id)]['calls_connected'] += i.calls_connected\n for i in sms_sent:\n summary[i.operator_id]['sms_sent'] += i.sms_sent\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'claimed': data['claimed'], 'completed': data['completed'],\n 'cleared': data['cleared'], 'escalated': data['escalated'],\n 'transferred': data['transferred'], 'promised': data['promised'\n ], 'amount_recovered': data['amount_recovered'], 'calls_made':\n data['calls_made'], 'calls_connected': data['calls_connected'],\n 'sms_sent': data['sms_sent'], 'date': cal_date})\n if insert_args:\n Summary.insert_many(insert_args).execute()\n cycle_args = []\n escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(\n Escalation.id).alias('escalated_in')).where(Escalation.status ==\n ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==\n cal_date).group_by(Escalation.escalate_to)\n for i in escalated_in:\n cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.\n escalated_in, 'date': cal_date})\n amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM\n (RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)\n for i in amount_recovered_total:\n amount_recovered_total = i.principal_part + i.late_fee_part\n cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':\n amount_recovered_total, 'date': cal_date})\n if cycle_args:\n Summary.insert_many(cycle_args).execute()\n logging.info('cal summary done')\n send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY2)\ndef cron_summary2(payload, msg_id):\n \"\"\"Deprecated; the scheduled job still runs, its exact status is to be confirmed.\"\"\"\n cal_date = date.today() - timedelta(days=1)\n employees = Bomber.select(Bomber, Role).join(Role)\n auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,\n AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')\n ).where(fn.DATE(AutoCallActions.created_at) == cal_date)\n amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.\n SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(\n RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(\n RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.\n is_null(False), RepaymentLog.is_bombed == True)\n cleared = Application.select(Application.latest_bomber, fn.COUNT(\n Application.id).alias('cleared')).where(fn.DATE(Application.\n finished_at) == cal_date, Application.status == ApplicationStatus.\n REPAID.value, Application.latest_bomber.is_null(False))\n auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,\n AutoCallActions.result)\n amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)\n cleared = cleared.group_by(Application.latest_bomber)\n summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,\n 'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered': \n 0} for e in employees}\n for a in auto_call_actions:\n summary[a.bomber_id]['answered_calls'] += a.count\n if a.result == AutoCallResult.PTP.value:\n summary[a.bomber_id]['ptp'] += a.count\n if a.result == AutoCallResult.FOLLOW_UP.value:\n 
summary[a.bomber_id]['follow_up'] += a.count\n if a.result == AutoCallResult.NOT_USEFUL.value:\n summary[a.bomber_id]['not_useful'] += a.count\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],\n 'answered_calls': data['answered_calls'], 'ptp': data['ptp'],\n 'follow_up': data['follow_up'], 'not_useful': data['not_useful'\n ], 'cleared': data['cleared'], 'amount_recovered': str(data[\n 'amount_recovered']), 'date': cal_date})\n if insert_args:\n Summary2.insert_many(insert_args).execute()\n\n\n@action(MessageAction.BOMBER_SYNC_CONTACTS)\ndef sync_suggested_contacts(payload, msg_id):\n \"\"\" suggested contacts sync \"\"\"\n applications = Application.select(Application.id, Application.user_id\n ).where(Application.status << [ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value])\n logging.debug('start sync contact')\n for a in applications:\n sync_contacts(a)\n logging.info('contact sync finished')\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_DISCOUNT_APPROVED)\ndef bomber_discount_approved(payload, msg_id):\n app_id = payload['id']\n msg_type = payload['msg_type']\n discount_to = payload['discount_to']\n effective_to = payload['effective_to']\n application = Application.filter(Application.id == app_id).first()\n if not application:\n logging.error(\n 'discount approved msg send failed application %s not found',\n app_id)\n return\n template = Template.select(Template.text, Template.app).where(Template.\n type == ConnectType.AUTO_SMS.value, Template.id << Template.\n get_auto_sms_tpl(msg_type), Template.app == application.app).first()\n if not template:\n logging.error('discount approved msg send failed template %s not found'\n , msg_type)\n return\n promised_date = None\n if application.promised_date:\n promised_date = application.promised_date.strftime('%d-%m-%Y')\n tpl_data = {'user_name': application.user_name, 'due_days': application\n .overdue_days, 'app_name': application.app, 'phone': application.\n user_mobile_no, 'cs_number': cs_number_conf.get(application.app,\n '02150202889'), 'promised_date': promised_date, 'discount_to':\n discount_to, 'effective_to': effective_to}\n content = template.text.format(**tpl_data)\n data_list = [{'receiver': '62' + application.user_mobile_no, 'content':\n content, 'title': ''}]\n send_sms(data_list, msg_type, application.app)\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST)\ndef bomber_auto_call_list(payload, msg_id):\n with db.atomic():\n bomber_dispatch_app()\n dispatch_instalment_app()\n dis_apps = DispatchApp.select(DispatchApp.application).where(\n DispatchApp.status == DisAppStatus.NORMAL.value)\n c1_apps = Application.select(Application.id, Application.cycle,\n Application.follow_up_date, Application.called_times).where(Application\n .status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.\n AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.\n is_rejected == False, Application.promised_date.is_null(True) | (fn\n .DATE(Application.promised_date) < datetime.today().date())).order_by(\n Application.overdue_days, Application.apply_at)\n dis_apps_ids = [da.application_id for da in dis_apps]\n insert_args = []\n for a in c1_apps:\n if a.id in dis_apps_ids:\n continue\n 
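# only C1A applications that are not already dispatched to an outside partner get queued for the auto call list\n 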
insert_args.append({'application': a.id, 'cycle': a.cycle,\n 'follow_up_date': a.follow_up_date, 'called_times': 1 if a.\n called_times else 0, 'description': 'init'})\n if not insert_args:\n logging.error('no application need auto call')\n with db.atomic():\n AutoCallList.delete().execute()\n for idx in range(0, len(insert_args), 100):\n AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()\n for idx in range(0, len(insert_args), 100):\n application_list = [i['application'] for i in insert_args[idx:idx +\n 100]]\n send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {\n 'application_list': application_list})\n logging.info('bomber generate auto call list finished')\n send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})\n\n\nclass ChangeBomberTool(object):\n\n @staticmethod\n def in_record(bomber_id, ids, bd):\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.\n alias('application_id'), R(str(bomber_id)).alias('bomber_id'),\n fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(\n 'expected_out_time'), Application.overdue_days.alias(\n 'entry_overdue_days')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << ids)\n Application.update(latest_bomber=bomber_id).where(Application.id.\n in_(ids)).execute()\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bd),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n @staticmethod\n def out_record(a, bd):\n _id = str(a.id)\n DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a\n .overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]\n .get('principal_paid')), out_late_fee_pending=bd[_id].get(\n 'late_fee') - bd[_id].get('late_fee_paid')).where(\n DispatchAppHistory.application == a.id, DispatchAppHistory.\n bomber_id == a.latest_bomber_id).execute()\n a.last_bomber = a.latest_bomber\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n a.save()\n\n @staticmethod\n def classify(l, b):\n if len(l) == 1:\n return l[0]\n _l = filter(lambda x: x['bomber'] != b, l)\n return min(_l, key=lambda x: len(x['ids']))\n\n\n<mask token>\n\n\ndef bomber_dispatch_app():\n try:\n c1a_dispatch_app()\n except Exception as e:\n logging.error('c1a_dispatch_app error:%s' % str(e))\n cycle = {(1): 10, (2): 30, (3): 60, (4): 90}\n apps = Application.select().where(fn.DATE(Application.C2_entry) == date\n .today(), Application.type == ApplicationType.CASH_LOAN.value)\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C2.value)\n apps_ids = [a.id for a in apps]\n dispatch_inserts = []\n start_index = 0\n apps_length = len(apps_ids)\n logging.warning('apps length %s' % str(apps_length))\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)\n gen = CycleIter([b.id for b in bombers])\n existing_list = []\n end_index = start_index + int(apps_length * p.app_percentage)\n logging.info('partner length %s' % str(end_index))\n if not apps_ids[start_index:end_index]:\n continue\n bills = BillService().bill_list(application_ids=apps_ids[\n start_index:end_index])\n bill_dict = {bill['application_id']: bill for bill in 
bills}\n for a_id in apps_ids[start_index:end_index]:\n bomber = average_gen(gen, existing_list)\n q = DispatchApp.delete().where(DispatchApp.application == a_id\n ).execute()\n dispatch_inserts.append({'application': a_id, 'bomber': bomber,\n 'partner': p.id})\n application = Application.select().where(Application.id == a_id\n ).first()\n application.latest_bomber = bomber\n application.status = ApplicationStatus.AB_TEST.value\n application.ptp_bomber = None\n application.save()\n day_next_cycle = cycle.get(application.cycle\n ) - application.overdue_days\n DispatchAppHistory.create(application=a_id, partner_id=p.id,\n bomber_id=bomber, entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - Decimal(\n bill_dict[a_id].get('principal_paid')),\n entry_late_fee_pending=Decimal(bill_dict[a_id].get(\n 'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')\n ), expected_out_time=date.today() + timedelta(days=\n day_next_cycle))\n start_index = end_index\n with db.atomic():\n for idx in range(0, len(dispatch_inserts), 100):\n DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n config = SystemConfig.prefetch(SCI.AB_TEST_C2)\n c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)\n c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)\n c2 = apps_ids[start_index:]\n if c2:\n bills = BillService().bill_list(application_ids=c2)\n else:\n bills = []\n bill_dict = {bill['application_id']: bill for bill in bills}\n logging.info('c2 AB_test length: %s' % str(c2))\n gen = CycleIter(c2_bomber)\n existing_list = []\n for c in c2:\n bomber = average_gen(gen, existing_list)\n application = Application.filter(Application.id == c).first()\n application.status = ApplicationStatus.AB_TEST.value\n application.latest_bomber = bomber\n application.ptp_bomber = None\n application.save()\n day_next_cycle = 46 - application.overdue_days\n DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at\n =datetime.now(), entry_overdue_days=application.overdue_days,\n entry_principal_pending=application.amount - bill_dict[c].get(\n 'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(\n 'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),\n expected_out_time=date.today() + timedelta(days=day_next_cycle))\n ab_test_other()\n\n\ndef c1a_dispatch_app():\n today = datetime.today().date()\n tomorrow = today + timedelta(days=1)\n c1a_apps = Application.select().where(Application.status << [\n ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.\n value], Application.dpd1_entry >= today, Application.dpd1_entry <\n tomorrow, Application.type == ApplicationType.CASH_LOAN.value)\n all_aids = [a.id for a in c1a_apps]\n partners = Partner.select().where(Partner.status == PartnerStatus.\n NORMAL.value, Partner.cycle == Cycle.C1A.value)\n end = 0\n for p in partners:\n bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.\n is_del == 0)\n start = end\n end += int(len(all_aids) * p.app_percentage)\n aids = all_aids[start:end]\n bids = [b.id for b in bombers]\n if not bids or not aids:\n continue\n average_number = get_average_number(len(aids), len(bids))\n p_end = 0\n for i, bid in enumerate(bids):\n p_start = p_end\n p_end += average_number[i]\n b_aids = aids[p_start:p_end]\n with db.atomic():\n q = Application.update(latest_bomber=bid, status=\n ApplicationStatus.AB_TEST.value).where(Application.id <<\n b_aids).execute()\n params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,\n 
'application_ids': b_aids, 'dest_bomber_id': bid}\n new_in_record(**params)\n try:\n dispatch_inserts = []\n for aid in b_aids:\n dispatch_inserts.append({'application': aid, 'bomber':\n bid, 'partner': p.id, 'status': DisAppStatus.NORMAL\n .value})\n if dispatch_inserts:\n q = DispatchApp.insert_many(dispatch_inserts).execute()\n except Exception as e:\n logging.error('c1a dispatch write to dispatch_app error:%s' % str(e))\n\n\n<mask token>\n\n\ndef get_cash_bomber(bids, cycle):\n cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==\n 0, Bomber.instalment != cycle)\n cash_bids = [b.id for b in cash_bombers]\n return cash_bids\n\n\n<mask token>\n\n\ndef out_and_in_record_instalment(**kwargs):\n if not kwargs.get('application_ids'):\n return\n out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(\n DispatchAppHistory.application << kwargs['application_ids'],\n DispatchAppHistory.out_at.is_null(True)).execute()\n cycle_period = {1: '10', 2: '30', 3: '60', 4: '90'}\n period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')\n kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(kwargs['dest_bomber_id'])).alias(\n 'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.\n alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).\n alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << kwargs[\n 'application_ids'])\n application_list = list(subquery)\n for idx in range(0, len(application_list), 50):\n applications = application_list[idx:idx + 50]\n app_ids = [i.application_id for i in applications]\n overdue_bills = OverdueBill.select().where(OverdueBill.\n collection_id << app_ids)\n sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]\n bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)\n insert_args = lambad_instalment_result(bill_list, applications)\n if not insert_args:\n continue\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef c1b_dispatch_in_record(**kwargs):\n app_ids = kwargs.get('apps')\n partner_id = kwargs.get('partner_id', 'null')\n bill_dict = kwargs.get('bill_dict')\n period = kwargs.get('period')\n bomber_id = kwargs.get('bomber_id')\n if not all([app_ids, partner_id, bill_dict, period]):\n return False\n bill_dict = {str(k): v for k, v in bill_dict.items()}\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().\n alias('entry_at'), Application.overdue_days.alias(\n 'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(\n 'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)\n .alias('expected_out_time')).where(Application.id << app_ids)\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_CONTACT)\ndef bomber_auto_call_contact(payload, msg_id):\n application_list = payload['application_list']\n applications = []\n for 
app_id in application_list:\n applications.append(Application.filter(Application.id == app_id).\n first())\n with db.atomic():\n for application in applications:\n cycle = application.cycle\n contacts = Contact.select().where(Contact.user_id ==\n application.user_id, Contact.latest_status.not_in(\n ContactStatus.no_use())).order_by(-Contact.useful, Contact.\n relationship, -Contact.total_duration, -Contact.total_count)\n level1 = []\n level2 = []\n level3 = []\n level = []\n for c in contacts:\n if c.relationship == Relationship.APPLICANT.value:\n level.append(c)\n elif c.relationship == Relationship.FAMILY.value:\n level1.append(c)\n elif c.relationship == Relationship.COMPANY.value:\n level2.append(c)\n elif c.relationship == Relationship.SUGGESTED.value:\n level3.append(c)\n contacts = level + level2 + level1 + level3\n numbers = []\n fc_count = 0\n app_calls = []\n need_verify = False\n for eac_contact in contacts:\n if (eac_contact.relationship == Relationship.FAMILY.value and\n eac_contact.useful == ContactsUseful.NONE.value):\n need_verify = True\n break\n if need_verify:\n logging.info('Found contact need update. app id {}'.format(\n str(application.id)))\n app_calls = AuditService().phone_invalid(cat=Relationship(1\n ).name, application_id=application.external_id)\n call_history = True\n c1b_family_dict = defaultdict(list)\n for c in contacts:\n if c.relationship == Relationship.COMPANY.value:\n if cycle == Cycle.C1A.value:\n call_history = check_call_history(application)\n break\n if cycle == Cycle.C1B.value:\n if (c.source != CompanyContactType.\n BASIC_INFO_JOB_TEL.value):\n continue\n if c.relationship == Relationship.FAMILY.value:\n if cycle == Cycle.C1A.value:\n call_history = check_call_history(application)\n break\n if c.useful == ContactsUseful.NONE.value:\n c.useful = check_valid_phone(app_calls, c)\n c.save()\n if c.useful == ContactsUseful.INVALID.value:\n logging.info('Found invalid contact. 
{}'.format(str\n (c.id)))\n continue\n if cycle == Cycle.C1B.value:\n c1b_family_dict[c.source].append(c.number)\n continue\n if c.relationship == Relationship.SUGGESTED.value:\n if cycle not in (Cycle.C2.value, Cycle.C3.value):\n break\n if cycle == Cycle.C2.value and fc_count > 10:\n break\n if cycle == Cycle.C3.value and fc_count > 20:\n break\n fc_count += 1\n numbers.append(c.number)\n if len(numbers) == 0 or not call_history:\n src_contact = Contact.select().where(Contact.user_id ==\n application.user_id, Contact.source <<\n FamilyContactType.c1a_order())\n c1a_family_dict = defaultdict(list)\n for e in src_contact:\n c1a_family_dict[e.source].append(e.number)\n for call_type in FamilyContactType.c1a_order():\n numbers.extend(c1a_family_dict[call_type])\n if cycle == Cycle.C1B.value:\n for call_type in FamilyContactType.c1b_order():\n numbers.extend(c1b_family_dict[call_type])\n numbers = list(set(numbers))\n update_query = AutoCallList.update(numbers=','.join(numbers)\n ).where(AutoCallList.application == application.id)\n update_query.execute()\n\n\n<mask token>\n\n\ndef check_call_history(application):\n app_create_at = application.created_at + timedelta(days=4)\n if datetime.today().date() > app_create_at.date():\n call_actions = CallActions.select().where(CallActions.type == 0, \n CallActions.application == application.id, CallActions.\n created_at > datetime.now() - timedelta(days=5))\n for call in call_actions:\n if call.phone_status == PhoneStatus.CONNECTED.value:\n return True\n return False\n return True\n\n\n@action(MessageAction.BOMBER_SCAVENGER)\ndef scavenger(payload, msg_id):\n scavenger_time = -60\n scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'SCAVENGER_TIME').first()\n if scavenger and scavenger.value.isdigit():\n scavenger_time = -int(scavenger.value)\n update_auto_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value, description='scavenger').where(AutoCallList.status ==\n AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime\n .now() + timedelta(minutes=scavenger_time))\n count = update_auto_call_list.execute()\n logging.info('scavenger processed %s applications', count)\n mail_box_scavenger_time = -30\n mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==\n 'MAIL_BOX_SCAVENGER_TIME').first()\n if mail_box_scavenger and mail_box_scavenger.value.isdigit():\n mail_box_scavenger_time = -int(mail_box_scavenger.value)\n update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.\n PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.\n value, AutoCallList.updated_at < datetime.now() + timedelta(minutes\n =mail_box_scavenger_time))\n mail_box_count = update_mail_box_call_list.execute()\n logging.info('scavenger update mail box %s', mail_box_count)\n update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value\n ).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.\n updated_at < datetime.now() + timedelta(minutes=-30))\n ivr_result = update_auto_ivr.execute()\n logging.info('scavenger update %s ivr' % ivr_result)\n\n\n<mask token>\n\n\n@action(MessageAction.REPORT_BOMBER_COLLECTION)\ndef report_bomber_collection(payload, msg_id):\n start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)\n ).scalar()\n now = datetime.now()\n if start_date and str(start_date) == str(now)[:10]:\n return\n end_date = str(now + timedelta(days=1))[:10]\n start_date = str(now)[:10]\n dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))\n 
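# d1-d6 below are six raw SQL aggregates over auto_call_list / auto_call_actions / bombing_history; c1-c11 combine them (plus agent stats) into the daily report rows\n 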
all_overdue_loan_sql1 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n GROUP BY 1;\n \"\"\"\n s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()\n d1 = OperatedDict(s_data1)\n all_overdue_loan_sql2 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE DATE(ba.follow_up_date) > CURDATE()\n AND ba.called_counts = 0\n GROUP BY 1;\n \"\"\"\n s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()\n d2 = OperatedDict(s_data2)\n overdue_loans_entered_into_predict_call_system_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE ba.called_counts >= 1\n GROUP BY 1;\n \"\"\"\n s_data3 = readonly_db.execute_sql(\n overdue_loans_entered_into_predict_call_system_sql).fetchall()\n d3 = OperatedDict(s_data3)\n loans_completed_sql = \"\"\"\n SELECT ba.cycle, COUNT(DISTINCT ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()\n d4 = OperatedDict(s_data4)\n connected_calls_automatic_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()\n d5 = OperatedDict(s_data5)\n connected_calls_manual_sql = \"\"\"\n SELECT bb.cycle, COUNT(bb.id)\n FROM bomber.bombing_history bb\n WHERE DATE(bb.created_at) = curdate()\n AND (bb.bomber_id < 150 OR bb.bomber_id > 200)\n GROUP BY bb.cycle;\n \"\"\"\n s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()\n d6 = OperatedDict(s_data6)\n logging.info('Directly get data from database successfully.')\n c1 = d1 - d2\n c2 = d3\n c3 = c2 / c1\n c4 = d4\n c5 = c4 / c2\n c6 = d5\n c7 = c6 / c4\n c8 = d6\n c9 = OperatedDict(get_agent())\n c10 = (c6 + c8) / c9\n try:\n c11 = average_call_duration_team(start_date, end_date)\n except AttributeError:\n c11 = {}\n lst = []\n for i in range(1, 5):\n lst.append({'apply_date': start_date, 'cycle': dct[i],\n 'all_overdue_loan': c1.get(i, 0),\n 'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),\n 'of_overdue_loans_entered_into_predict_call_system': round(c3.\n get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),\n 'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *\n 100, 1), 'connected_calls_automatic': c6.get(i, 0),\n 'connected_calls_automatic_completed_loans': round(c7.get(i, 0),\n 1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i, \n 0), 'average_calls_agent': round(c10.get(i, 0), 1),\n 'average_call_duration_team': round(c11.get(i, 0), 1)})\n ReportCollection.insert_many(lst).execute()\n logging.info('report_bomber_collection:Done!')\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)\ndef bomber_auto_call_list_record(payload, msg_id):\n \"\"\"Keep one year of auto_call_list records: delete the previous day's data and insert today's.\"\"\"\n now = datetime.now()\n if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):\n date_sql = \"\"\"\n SELECT DATE(created_at) FROM auto_call_list_record\n GROUP BY DATE(created_at) limit 1\n \"\"\"\n del_date = db.execute_sql(date_sql).fetchone()[0]\n del_sql = \"\"\"\n DELETE FROM auto_call_list_record WHERE date(created_at) = %s\n \"\"\"\n db.execute_sql(del_sql, [del_date])\n sql = \"\"\"\n INSERT INTO auto_call_list_record\n SELECT * FROM auto_call_list\n \"\"\"\n db.execute_sql(sql)\n logging.info('bomber_auto_call_list_record done')\n\n\n<mask token>\n\n\ndef 
new_out_record(**kwargs):\n if not kwargs['application_ids']:\n return\n DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.\n bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.\n application << kwargs['application_ids'], DispatchAppHistory.out_at\n .is_null(True)).execute()\n if kwargs.get('month_dispatch'):\n return\n try:\n Application.update(ptp_bomber=None).where(Application.id << kwargs[\n 'application_ids']).execute()\n except Exception as e:\n logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[\n 'application_ids'], str(e)))\n\n\n<mask token>\n\n\ndef end_old_application(old_app, paid=False):\n if paid:\n if old_app.status == OldLoanStatus.WAITING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return\n if old_app.status == OldLoanStatus.PROCESSING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return old_app.application_id\n end_date = old_app.end_date\n now = datetime.now()\n if now >= max(end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n old_app.save()\n return old_app.application_id\n\n\n<mask token>\n\n\ndef in_record(**kwargs):\n \"\"\"\n :param kwargs: dist_partner_id, dist_bomber_id,\n expected_out_time, application_ids\n :return:\n \"\"\"\n kwargs['dist_partner_id'] = kwargs.get('dist_partner_id') or 'null'\n subquery = Application.select(Application.amount, fn.NOW().alias(\n 'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(\n 'application_id'), R(str(kwargs['dist_bomber_id'])).alias(\n 'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.\n alias('entry_overdue_days'), R(str(kwargs['dist_partner_id'])).\n alias('partner_id'), R('\"{}\"'.format(kwargs['expected_out_time'])).\n alias('expected_out_time')).where(Application.status !=\n ApplicationStatus.REPAID.value, Application.id << kwargs[\n 'application_ids'])\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n app_ids = [i.application_id for i in applications]\n bill_list = BillService().bill_list(application_ids=app_ids)\n bill_dict = {str(bill['application_id']): bill for bill in bill_list}\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n<mask token>\n\n\ndef start_old_application(old_app, cancel=False):\n application_id = old_app.application_id\n if cancel and old_app.status == OldLoanStatus.PAID.value:\n now = datetime.now()\n if old_app.start_date is None:\n old_app.status = OldLoanStatus.WAITING.value\n elif now >= max(old_app.end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.\n promised_date or now)).where(DispatchAppHistory.bomber_id ==\n old_app.bomber_id, DispatchAppHistory.application ==\n application_id).execute()\n else:\n old_app.status = OldLoanStatus.PROCESSING.value\n DispatchAppHistory.update(out_at=None).where(DispatchAppHistory\n .bomber_id == old_app.bomber_id, DispatchAppHistory.\n application == application_id).execute()\n old_app.save()\n return\n application = Application.get_or_none(Application.id == application_id,\n Application.status != ApplicationStatus.REPAID.value, Application.\n overdue_days > 90, Application.promised_date.is_null(True) | (fn.\n DATE(Application.promised_date) < datetime.today().date()))\n if not application:\n logging.error('Can not set 
old application %s to start collecting',\n application_id)\n return\n if old_app.status in OldLoanStatus.no_available():\n logging.info('%s has finished or paid', old_app.application_id)\n return\n config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)\n sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)\n old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value\n old_app.status = OldLoanStatus.PROCESSING.value\n old_app.bomber_id = old_app_bomber\n old_app.start_date = datetime.now()\n if not old_app.end_date:\n old_app.end_date = datetime.now() + timedelta(days=sp)\n old_app.save()\n in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,\n application_ids=[old_app.application_id], expected_out_time=str(\n old_app.end_date))\n\n\n<mask token>\n\n\ndef run_member_sql(sql):\n result = [0, 0]\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n sql_result = cursor.fetchone()\n if sql_result:\n result = sql_result\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n return result\n\n\n<mask token>\n\n\ndef get_before_bomber(date_time):\n begin_time = str(date_time - timedelta(days=7))\n end_time = str(date_time)\n old_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at<date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (begin_time, begin_time))\n old_data = run_one_sql(old_sql)\n new_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at> '%s' \n and created_at<'%s'\n and overdue_days=1;\n \"\"\"\n % (begin_time, end_time))\n new_data = run_one_sql(new_sql)\n dpd4_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>date_add('%s',interval 1 day) \n and created_at< date_add('%s',interval 1 day)\n and overdue_days=4;\n \"\"\"\n % (begin_time, end_time))\n dpd4_data = run_one_sql(dpd4_sql)\n dpd2_sql = (\n \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at< date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\"\n % (end_time, end_time))\n dpd2_data = run_one_sql(dpd2_sql)\n all_money = old_data + new_data\n repayment = all_money - dpd4_data - dpd2_data\n pro = 0\n if all_money:\n pro = repayment / all_money * 100\n RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,\n proportion=pro, repayment=repayment)\n\n\n<mask token>\n\n\ndef get_c1a_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=19)\n repayment_sql = (\n \"\"\"\n select \n 
sum(b.principal_part+b.late_fee_part) as paid_amount, \n cdt\n from \n (select \n br.principal_part, br.late_fee_part, \n date(cdt) as cdt, br.repay_at, br.application_id\n from (\n select ba.id, ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n if not repayment:\n return\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n for d in repayment:\n repay = d[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n d[1], RepaymentReportInto.cycle == Cycle.C1A.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = round(repay, 3)\n pro = repay / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_c2_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n not_contain_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_money = run_one_sql(not_contain_sql)\n begin_date = date_time - timedelta(days=37)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n not_contain_repay_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select 
br.principal_part,br.late_fee_part,\n date(a.c2_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c2_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=1)\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 3\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n not_contain_repay = run_all_sql(not_contain_repay_sql)\n if not not_contain_money and repayment:\n return\n for i in ContainOut.values():\n if i == ContainOut.NOT_CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(\n not_contain_money, 3), proportion='0', repayment=0,\n is_first_loan=is_first_loan, contain_out=ContainOut.\n NOT_CONTAIN.value)\n for repay in not_contain_repay:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n elif i == ContainOut.CONTAIN.value:\n RepaymentReportInto.create(time=end_date - timedelta(days=1\n ), cycle=Cycle.C2.value, all_money=round(all_money, 3),\n proportion='0', repayment=0, is_first_loan=\n is_first_loan, contain_out=ContainOut.CONTAIN.value)\n for repay in repayment:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto\n .time == repay[1], RepaymentReportInto.\n is_first_loan == is_first_loan, RepaymentReportInto\n .contain_out == i, RepaymentReportInto.cycle ==\n Cycle.C2.value).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\ndef get_c3_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = (\n \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id, ba.c3_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o on a.id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\"\n % (begin_date, end_date, is_first_loan))\n all_money = run_one_sql(all_money_sql)\n begin_date = date_time - timedelta(days=30)\n repayment_sql = (\n \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c3_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c3_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c3_entry >= '%s'\n and ba.c3_entry < '%s'\n and ba.type = 0\n and 
da.is_first_loan = '%s'\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 4\n group by 4, 5) b\n group by 2\n \"\"\"\n % (begin_date, end_date, is_first_loan))\n repayment = run_all_sql(repayment_sql)\n RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle\n =Cycle.C3.value, all_money=round(all_money, 3), proportion='0',\n repayment=0, is_first_loan=is_first_loan, contain_out=\n ContainOut.CONTAIN.value)\n if not repayment:\n return\n for repay in repayment:\n repay_money = Decimal(0)\n if repay[0]:\n repay_money = repay[0]\n repay_money = repay_money / 1000000\n report = RepaymentReportInto.filter(RepaymentReportInto.time ==\n repay[1], RepaymentReportInto.cycle == Cycle.C3.value, \n RepaymentReportInto.is_first_loan == is_first_loan).first()\n if report:\n report.repayment = repay_money\n pro = 0\n if report.all_money and int(report.all_money):\n pro = repay_money / report.all_money * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n<mask token>\n\n\ndef get_static_bomber(begin_date):\n active_date = begin_date - timedelta(days=8)\n bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),\n BomberR.last_active_at.alias('active')).where(BomberR.\n last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])\n summary = []\n for bomber in bombers:\n summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':\n bomber.role.cycle, 'work_ind': 0})\n SummaryBomber.insert_many(summary).execute()\n\n\n<mask token>\n\n\n@time_logger\ndef get_call_and_made(end_date, begin_date, real_time_query=False):\n call_sql = (\n \"\"\"\n select \n bomber_id, \n count(case when relationship is not null then application_id end) \n as 'call_cnt', \n count(distinct case when relationship is not null then \n application_id end) as 'call_case',\n count(case when phone_status=4 then application_id end) as 'connect',\n count(distinct case when phone_status=4 then application_id end) \n as 'connect_case'\n from (\n select bomber_id,application_id,phone_status, cycle, relationship\n from bomber.call_actions ba\n where created_at>'%s' and created_at<'%s'\n and type in (0, 1)\n ) a\n group by 1\n \"\"\"\n % (begin_date, end_date))\n calls = run_all_sql(call_sql)\n if real_time_query:\n return calls\n for call in calls:\n bomber, call_cnt, case_made, connect_cnt, case_connect = call\n SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,\n call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(\n SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date\n ).execute()\n return calls\n\n\n@time_logger\ndef get_claimed_cnt(end_date, begin_date, real_time_query=False):\n table_date = begin_date - timedelta(days=30)\n claimed_sql = (\n \"\"\"\n SELECT\n COUNT( `t1`.`application_id` ) AS cnt,\n `t1`.`bomber_id` AS bomber_id \n FROM\n `dispatch_app_history` AS t1 \n WHERE\n ( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null ) \n AND ( `t1`.`bomber_id` != 1000 ) \n AND ( `t1`.`partner_id` IS null ) \n AND ( `t1`.`entry_at` > '%s' ) \n AND ( `t1`.`entry_at` < '%s' ) \n GROUP BY\n `t1`.`bomber_id`\n \"\"\"\n % (begin_date, table_date, end_date))\n claimeds = run_all_sql(claimed_sql)\n if real_time_query:\n return claimeds\n for claimed in claimeds:\n cnt, bomber_id = claimed\n SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == bomber_id).execute()\n return claimeds\n\n\ndef get_sms_data(end_data, begin_data):\n all_sms = 
ConnectHistoryR.select(ConnectHistoryR.operator.alias(\n 'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')\n ).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.\n created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())\n ).group_by(ConnectHistoryR.operator)\n for sms in all_sms:\n SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==\n begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()\n return all_sms\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_NEW)\ndef summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_active_bomber(begin_date)\n get_call_and_made(end_date, begin_date)\n get_claimed_cnt(end_date, begin_date)\n get_sms_data(end_date, begin_date)\n get_ptp_data(end_date, begin_date)\n get_recover_amount(end_date, begin_date)\n get_unfollowed(begin_date)\n get_unfollowed_call(begin_date)\n\n\ndef get_new_case_amount(begin_date, end_date):\n all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.\n entry_late_fee_pending + DispatchAppHistoryR.\n entry_principal_pending).alias('pending'), DispatchAppHistoryR.\n bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')\n ).where(DispatchAppHistoryR.entry_at > begin_date, \n DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.\n partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)\n for case in all_case:\n SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt\n =case.cnt).where(SummaryBomber.bomber_id == case.bomber_id, \n SummaryBomber.time == begin_date).execute()\n return all_case\n\n\n<mask token>\n\n\ndef get_kp_today(begin_date, end_date):\n sql = (\n \"\"\"\n select bomber_id, count(distinct application_id)\n from( \n SELECT bomber_id, application_id\n FROM bomber.auto_call_actions a\n WHERE promised_date >= '%s' AND created_at < '%s' \n AND EXISTS(select 1 from bomber.application ba \n where a.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))\n UNION \n SELECT bomber_id, application_id\n FROM bomber.bombing_history b\n WHERE promised_date >= '%s' AND created_at < '%s'\n AND EXISTS(select 1 from bomber.application ba \n where b.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))) result\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, begin_date, begin_date, end_date, begin_date)\n )\n kp_today = run_all_sql(sql)\n for kp in kp_today:\n SummaryBomber.update(KP_today_cnt=kp[1]).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == kp[0]).execute()\n\n\n<mask token>\n\n\ndef get_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN 
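-- repayments landing on the c1b_entry day count as new-case cleared\n            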
bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c2_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c2_entry > '%s' \n AND ba.c2_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c3_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c3_entry > '%s' \n AND ba.c3_entry < '%s' \n ) a\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date, end_date))\n case_cleared_sums = run_all_sql(sql)\n for clear in case_cleared_sums:\n SummaryBomber.update(new_case_cleared_sum=clear[1]).where(\n SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==\n clear[0]).execute()\n\n\n@time_logger\ndef get_new_case_call(begin_date, end_date, real_query_time=False):\n sql = (\n \"\"\"\n SELECT\n bd.bomber_id,\n count( DISTINCT bd.application_id )\n FROM\n bomber.dispatch_app_history bd\n INNER JOIN bomber.call_actions bc \n ON bd.application_id = bc.application_id \n AND bd.bomber_id = bc.bomber_id \n AND date( bd.entry_at ) = date( bc.created_at ) \n WHERE\n entry_at > '%s' \n AND entry_at < '%s' \n AND partner_id IS NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n new_case_calls = run_all_sql(sql)\n if real_query_time:\n return new_case_calls\n for call in new_case_calls:\n SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber\n .bomber_id == call[0], SummaryBomber.time == begin_date).execute()\n return new_case_calls\n\n\n@time_logger\ndef get_calltime_avg(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction,\n count( 1 ) AS auto_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' ' \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n AND an.recording is not null\n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]][0] += data[1]\n result[data[0]][1] += data[2]\n continue\n result[data[0]] = [data[1], data[2]]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=\n value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == key).execute()\n return result\n\n\ndef get_no_calltime_avg(begin_date, end_date):\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext 
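-- callfrom prefixes 5x/3x (the LIKE params below) mark manual extensions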
\n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND (an.status!='ANSWERED' or an.recording is null) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n for data in manuals:\n SummaryBomber.update(calltime_no_case_sum=data[1],\n calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /\n data[2] if data[2] else 0).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == data[0]).execute()\n\n\n@time_logger\ndef get_calltime_sum(begin_date, end_date, real_query_time=False):\n autos_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' '\n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date))\n autos = run_all_sql(autos_sql)\n manual_sql = (\n \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\"\n % (begin_date, end_date, '5%', '3%'))\n manuals = run_all_sql(manual_sql)\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n if real_query_time:\n return result\n for key, value in result.items():\n SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == key).execute()\n return result\n\n\n<mask token>\n\n\ndef get_unfollowed_call(begin_date):\n sql = (\n \"\"\"\n SELECT\n bomber_id,\n count(1)\n FROM\n (\n SELECT\n bd.application_id,\n date(bd.entry_at) AS entry_at,\n bd.bomber_id,\n date(bd.out_at) AS out_at\n FROM\n bomber.dispatch_app_history bd\n WHERE\n (\n out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)\n OR out_at IS NULL\n )\n AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)\n AND partner_id IS NULL\n AND NOT EXISTS (\n SELECT\n 1\n FROM\n bomber.call_actions bc\n WHERE\n bd.bomber_id = bc.bomber_id\n AND bc.application_id = bd.application_id\n AND bc.created_at < '%(begin_date)s'\n )\n ) a\n WHERE\n EXISTS (\n SELECT\n 1\n FROM\n bomber.call_actions bc\n WHERE\n a.application_id = bc.application_id\n AND a.bomber_id = bc.bomber_id\n AND bc.created_at > '%(begin_date)s'\n AND bc.created_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n AND bc.created_at >= a.entry_at\n )\n OR EXISTS (\n SELECT\n 1\n FROM\n bomber.application ba\n WHERE\n ba.id = a.application_id\n AND ba.finished_at > '%(begin_date)s'\n AND ba.finished_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n )\n GROUP BY\n 1\n \"\"\"\n % {'begin_date': begin_date})\n data = run_all_sql(sql)\n result = defaultdict(int)\n for d in data:\n result[d[0]] += d[1]\n bomber_list = []\n for key, value in result.items():\n bomber_list.append(key)\n SummaryBomber.update(unfollowed_call_cnt=SummaryBomber.\n new_case_call_cnt + value).where(SummaryBomber.time ==\n begin_date, SummaryBomber.bomber_id == key).execute()\n update_sql = SummaryBomber.update(unfollowed_call_cnt=SummaryBomber.\n new_case_call_cnt).where(SummaryBomber.time == 
begin_date)\n if bomber_list:\n update_sql = update_sql.where(SummaryBomber.bomber_id.not_in(\n bomber_list))\n update_sql.execute()\n return result\n\n\n@action(MessageAction.UPDATE_SUMMARY_NEW)\ndef update_summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'UPDATE_SUMMARY_NEW').first()\n if worker_log and worker_log.logs >= 5:\n return\n get_new_case_amount(begin_date, end_date)\n get_kp_cleared(begin_date, end_date)\n get_kp_today(begin_date, end_date)\n get_ptp_cnt(begin_date, end_date)\n get_ptp_call_cnt(begin_date, end_date)\n get_new_case_cleared(begin_date, end_date)\n get_new_case_call(begin_date, end_date)\n get_calltime_avg(begin_date, end_date)\n get_no_calltime_avg(begin_date, end_date)\n get_calltime_sum(begin_date, end_date)\n\n\ndef get_cycle_claimed(begin_date, end_date):\n sql = (\n \"\"\"\n select cycle,count(1)\n from bomber.application where cycle in (1,2,3,4)\n and (finished_at is null or (finished_at>'%s'))\n and created_at>'2018-09-01'\n group by 1\n \"\"\"\n % begin_date)\n result = run_all_sql(sql)\n return result\n\n\n@time_logger\ndef cycle_new_case(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( ba.id ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.created_at ) = bo.which_day_overdue \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c1b_entry ) = bo.which_day_overdue\n WHERE\n c1b_entry > '%s' \n AND c1b_entry < '%s' \n UNION\n SELECT\n 3 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c2_entry ) = bo.which_day_overdue \n WHERE\n c2_entry > '%s' \n AND c2_entry < '%s' \n UNION\n SELECT\n 4 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c3_entry ) = bo.which_day_overdue\n WHERE\n c3_entry > '%s' \n AND c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n all_datas = run_all_sql(sql)\n if real_time_query:\n return all_datas\n for data in all_datas:\n SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]\n ).where(SummaryBomber.time == begin_date, SummaryBomber.\n bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()\n return all_datas\n\n\n@time_logger\ndef get_cycle_new_case_call(begin_date, end_date, real_time_query=False):\n sql = (\n \"\"\"\n SELECT\n 1 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.created_at ) = date( bc.created_at ) \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN 
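-- a call on the c1b_entry day marks the C1B new case as worked\n            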
bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c1b_entry ) = date( bc.created_at ) \n WHERE\n ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s'\n UNION\n SELECT\n 3 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c2_entry ) = date( bc.created_at ) \n WHERE\n ba.c2_entry > '%s' \n AND ba.c2_entry < '%s'\n UNION\n SELECT\n 4 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c3_entry ) = date( bc.created_at ) \n WHERE\n ba.c3_entry > '%s' \n AND ba.c3_entry < '%s'\n \"\"\"\n % (begin_date, end_date, begin_date, end_date, begin_date,\n end_date, begin_date, end_date))\n cycle_datas = run_all_sql(sql)\n if real_time_query:\n return cycle_datas\n for data in cycle_datas:\n SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber\n .time == begin_date, SummaryBomber.cycle == data[0], \n SummaryBomber.bomber_id == data[0]).execute()\n return cycle_datas\n\n\ndef get_cycle_new_case_cleared(begin_date, end_date):\n sql = (\n \"\"\"\n SELECT\n '1' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.created_at ) = date( br.repay_at ) \n WHERE ba.created_at > '%s' \n AND ba.created_at < '%s' \n GROUP BY 1, 2 ) a \n UNION\n SELECT \n '2' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n GROUP BY 1, 2) a\n \"\"\"\n % (begin_date, end_date, begin_date, end_date))\n cycle_cleared = run_all_sql(sql)\n for i in cycle_cleared:\n SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber\n .cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.\n time == begin_date).execute()\n\n\ndef get_cycle_case_made_cnt(begin_date, end_date):\n sql = (\n \"\"\"\n select cycle,count(distinct application) from (\n select distinct cycle,application from bomber.auto_call_list_record\n where created_at >= '%s'\n and created_at < '%s'\n and called_counts <> 0\n and cycle in (1,2,3,4)\n union\n select distinct cycle,application_id from bomber.call_actions\n where created_at >= '%s'\n and created_at < '%s'\n and cycle in (1,2,3,4)\n ) c\n group by 1\n \"\"\"\n % (begin_date, end_date, begin_date, end_date))\n case_made_datas = run_all_sql(sql)\n for case_made_data in case_made_datas:\n SummaryBomber.update(case_made_cnt=case_made_data[1]).where(\n SummaryBomber.time == begin_date, SummaryBomber.cycle ==\n case_made_data[0], SummaryBomber.bomber_id == case_made_data[0]\n ).execute()\n\n\n@action(MessageAction.SUMMARY_NEW_CYCLE)\ndef summary_new_cycle(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')\n ).where(WorkerLog.created_at >= end_date, WorkerLog.action ==\n 'SUMMARY_NEW_CYCLE').first()\n if worker_log and worker_log.logs >= 5:\n return\n cycle_datas = SummaryBomber.select(fn.SUM(SummaryBomber.\n new_case_amount_sum).alias('new_case_amount_sum'), 
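# work_time_sum is read from cycle_data below, so aggregate it here too (field assumed on SummaryBomber, as the create() call uses it)\n        fn.SUM(SummaryBomber.work_time_sum).alias('work_time_sum'),\n        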
fn.SUM(\n SummaryBomber.new_case_cleared_sum).alias('new_case_cleared_sum'),\n fn.SUM(SummaryBomber.case_made_cnt).alias('case_made_cnt'), fn.SUM(\n SummaryBomber.case_connect_cnt).alias('case_connect_cnt'), fn.SUM(\n SummaryBomber.promised_cnt).alias('promised_cnt'), fn.SUM(\n SummaryBomber.promised_amount).alias('promised_amount'), fn.SUM(\n SummaryBomber.cleared_cnt).alias('cleared_cnt'), fn.SUM(\n SummaryBomber.cleared_amount).alias('cleared_amount'), fn.SUM(\n SummaryBomber.new_case_cnt).alias('new_case_cnt'), fn.SUM(\n SummaryBomber.new_case_call_cnt).alias('new_case_call_cnt'), fn.SUM\n (SummaryBomber.unfollowed_cnt).alias('unfollowed_cnt'), fn.SUM(\n SummaryBomber.unfollowed_call_cnt).alias('unfollowed_call_cnt'), fn\n .SUM(SummaryBomber.call_cnt).alias('call_cnt'), fn.SUM(\n SummaryBomber.sms_cnt).alias('sms_cnt'), fn.SUM(SummaryBomber.\n call_connect_cnt).alias('call_connect_cnt'), fn.SUM(SummaryBomber.\n ptp_today_cnt).alias('ptp_today_cnt'), fn.SUM(SummaryBomber.\n ptp_today_call_cnt).alias('ptp_today_call_cnt'), fn.SUM(\n SummaryBomber.ptp_next_cnt).alias('ptp_next_cnt'), fn.SUM(\n SummaryBomber.ptp_next_call_cnt).alias('ptp_next_call_cnt'), fn.SUM\n (SummaryBomber.KP_cleared_cnt).alias('KP_cleared_cnt'), fn.SUM(\n SummaryBomber.KP_today_cnt).alias('KP_today_cnt'), fn.SUM(\n SummaryBomber.work_ind).alias('work_ind'), fn.SUM(SummaryBomber.\n calltime_sum).alias('calltime_sum'), fn.SUM(SummaryBomber.\n calltime_case_sum).alias('calltime_case_sum'), fn.SUM(SummaryBomber\n .calltime_case_cnt).alias('calltime_case_cnt'), fn.SUM(\n SummaryBomber.calltime_no_case_sum).alias('calltime_no_case_sum'),\n fn.SUM(SummaryBomber.calltime_no_case_cnt).alias(\n 'calltime_no_case_cnt'), SummaryBomber.cycle.alias('cycle')).where(\n SummaryBomber.time == begin_date, SummaryBomber.cycle << Cycle.values()\n ).group_by(SummaryBomber.cycle)\n for cycle_data in cycle_datas:\n SummaryBomber.create(bomber_id=cycle_data.cycle, time=begin_date,\n cycle=cycle_data.cycle, new_case_amount_sum=cycle_data.\n new_case_amount_sum, new_case_cleared_sum=cycle_data.\n new_case_cleared_sum, new_case_cleard_rate=0, case_made_cnt=\n cycle_data.case_made_cnt, case_made_rate=0, case_connect_cnt=\n cycle_data.case_connect_cnt, case_connect_rate=0, promised_cnt=\n cycle_data.promised_cnt, promised_amount=cycle_data.\n promised_amount, cleared_cnt=cycle_data.cleared_cnt,\n cleared_amount=cycle_data.cleared_amount, new_case_cnt=\n cycle_data.new_case_cnt, new_case_call_cnt=cycle_data.\n new_case_call_cnt, unfollowed_cnt=cycle_data.unfollowed_cnt,\n unfollowed_call_cnt=cycle_data.unfollowed_call_cnt, call_cnt=\n cycle_data.call_cnt, sms_cnt=cycle_data.sms_cnt,\n call_connect_cnt=cycle_data.call_connect_cnt, calltime_case_avg\n =0, ptp_today_cnt=cycle_data.ptp_today_cnt, ptp_today_call_cnt=\n cycle_data.ptp_today_call_cnt, ptp_next_cnt=cycle_data.\n ptp_next_cnt, ptp_next_call_cnt=cycle_data.ptp_next_call_cnt,\n KP_cleared_cnt=cycle_data.KP_cleared_cnt, KP_today_cnt=\n cycle_data.KP_today_cnt, KP_cleared_rate=0, work_ind=cycle_data\n .work_ind, calltime_sum=cycle_data.calltime_sum,\n calltime_case_sum=cycle_data.calltime_case_sum,\n calltime_case_cnt=cycle_data.calltime_case_cnt,\n calltime_no_case_sum=cycle_data.calltime_no_case_sum,\n calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,\n work_time_sum=cycle_data.work_time_sum)\n cycle_claimed = get_cycle_claimed(begin_date, end_date)\n for claimed in cycle_claimed:\n SummaryBomber.update(claimed_cnt=claimed[1]).where(SummaryBomber.\n time == begin_date, 
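# cycle-level rows reuse the cycle id as bomber_id\n            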
SummaryBomber.cycle == claimed[0], \n SummaryBomber.bomber_id == claimed[0]).execute()\n cycle_new_case(begin_date, end_date)\n get_cycle_new_case_call(begin_date, end_date)\n get_cycle_new_case_cleared(begin_date, end_date)\n get_cycle_case_made_cnt(begin_date, end_date)\n all_datas = SummaryBomber.filter(SummaryBomber.time == begin_date)\n for data in all_datas:\n cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum if\n data.new_case_amount_sum else 0) * 100\n data.new_case_cleard_rate = cl_rat\n case_made_rate = (data.case_made_cnt / data.claimed_cnt if data.\n claimed_cnt else 0) * 100\n data.case_made_rate = case_made_rate\n case_connect_rate = (data.case_connect_cnt / data.case_made_cnt if\n data.case_made_cnt else 0) * 100\n data.case_connect_rate = case_connect_rate\n calltime_case_avg = (data.calltime_case_sum / data.\n calltime_case_cnt if data.calltime_case_cnt else 0)\n data.calltime_case_avg = calltime_case_avg\n calltime_no_case_avg = (data.calltime_no_case_sum / data.\n calltime_no_case_cnt if data.calltime_no_case_cnt else 0)\n data.calltime_no_case_avg = calltime_no_case_avg\n KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt if data.\n KP_today_cnt else 0) * 100\n data.KP_cleared_rate = KP_cleared_rate\n data.save()\n\n\n<mask token>\n\n\ndef get_change_bomber():\n cycle_role_map = {(5): Cycle.C1B.value, (6): Cycle.C2.value, (8): Cycle\n .C3.value}\n result = {}\n bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,\n BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=\n BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.\n created_at) == date.today(), BomberLog.role_id << list(\n cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.\n instalment == 0).dicts()\n for b_log in bomber_logs:\n cycle = cycle_role_map.get(b_log['role_id'])\n group_id = b_log['group_id']\n if cycle in result:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}\n else:\n result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],\n 'new_ids': []}}\n if b_log['operation'] == 0:\n result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])\n if result:\n bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map\n .keys())), Bomber.is_del == 0, Bomber.instalment == 0)\n for b in bombers:\n cycle_result = result.get(cycle_role_map[b.role_id], {})\n role_result = cycle_result.get(b.group_id)\n if not role_result:\n continue\n role_result['new_ids'].append(b.id)\n resutl_list = []\n for cycle, group_dict in result.items():\n resutl_list.extend(list(group_dict.values()))\n return resutl_list\n return []\n\n\n<mask token>\n\n\ndef get_average_number(app_nums, bomber_nums):\n average = app_nums // bomber_nums\n remainder = app_nums % bomber_nums\n average_list = [average for i in range(bomber_nums)]\n if remainder == 0:\n return average_list\n for i in range(remainder):\n average_list[i] += 1\n random.shuffle(average_list)\n return average_list\n\n\n<mask token>\n\n\ndef get_surplus_application(new_ids, del_ids, average_nums, classified_apps):\n surplus_apps = []\n for del_id in del_ids:\n del_res = classified_apps.get(del_id, {})\n p_list = del_res.get('p_list', [])\n np_list = del_res.get('np_list', [])\n del_res['need_num'] = -(len(p_list) + len(np_list))\n del_res['to_list'] = np_list + p_list\n surplus_apps.extend(p_list)\n surplus_apps.extend(np_list)\n for index, bid in enumerate(new_ids):\n average = average_nums[index]\n bomber_app = 
classified_apps.get(bid)\n if not bomber_app:\n bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid\n ).first()\n bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':\n [], 'np_num': 0, 'need_num': average, 'partner_id': bomber.\n partner_id if bomber else ''}\n classified_apps[bid] = bomber_app\n else:\n p_num = len(bomber_app['p_list'])\n np_num = len(bomber_app['np_list'])\n if p_num > average:\n bomber_app['need_num'] = -np_num\n else:\n bomber_app['need_num'] = average - (p_num + np_num)\n bomber_app['p_num'] = p_num\n bomber_app['np_num'] = np_num\n if bomber_app['need_num'] < 0:\n random.shuffle(bomber_app['np_list'])\n res_over = bomber_app['np_list'][:-bomber_app['need_num']]\n bomber_app['to_list'] = res_over\n surplus_apps.extend(res_over)\n classified_apps_list = sorted(classified_apps.values(), key=lambda x: x\n ['need_num'], reverse=True)\n return surplus_apps, classified_apps_list\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)\ndef bomber_dispatch_applications(payload, msg_id):\n change_bombers = get_change_bomber()\n instalment_change_bombers = get_instalment_change_bomber()\n params = {ApplicationType.CASH_LOAN.value: change_bombers,\n ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}\n for type, bombers in params.items():\n change_bomber_dispatch_apps(change_bombers=bombers, type=type)\n\n\n@action(MessageAction.REPAIR_BOMBER)\ndef repair_bomber(payload, msg_id):\n app_mobile = payload['app_mobile']\n username = payload.get('user_name')\n logging.info('start repair bomber, number: %s' % app_mobile)\n if 'mobile_no' in payload and payload['mobile_no']:\n mobile = number_strip(str(payload['mobile_no']))[:64]\n name = payload.get('mobile_name')\n application = Application.filter(Application.user_mobile_no == mobile)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, mobile, username, name)\n if 'tel_no' in payload and payload['tel_no']:\n tel_no = number_strip(str(payload['tel_no']))[:64]\n name = payload.get('tel_name')\n application = Application.filter(Application.user_mobile_no == tel_no)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, tel_no, username, name)\n\n\n<mask token>\n\n\ndef get_summary_daily_time():\n mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')\n mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')\n now_date = datetime.now()\n now_date_time = now_date.time()\n today_str = str(now_date.date())\n if now_date_time < mid_time_t1.time():\n yes_date = now_date - timedelta(days=1)\n yes_date_str = str(yes_date.date())\n begin_str = yes_date_str + ' 17:20:00'\n end_str = today_str + ' 00:00:00'\n elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():\n begin_str = today_str + ' 00:00:00'\n end_str = today_str + ' 12:40:00'\n else:\n begin_str = today_str + ' 12:40:00'\n end_str = today_str + ' 17:20:00'\n begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')\n end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')\n summary_datetime = now_date - timedelta(minutes=30)\n summary_date = summary_datetime.date()\n return begin_time, end_time, summary_date\n\n\n@action(MessageAction.SUMMARY_DAILY)\ndef summary_daily_data(payload, msg_id):\n begin_time, end_time, summary_date = get_summary_daily_time()\n call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.\n bomber_id, CallActionsR.application_id, CallActionsR.promised_date,\n 
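# a promised_date on a call action marks a PTP commitment\n        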
CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(\n CallActionsR.created_at >= begin_time, CallActionsR.created_at <\n end_time, CallActionsR.type << (0, 1))\n summary_dailys = {}\n for call in call_actions:\n if call.bomber_id not in summary_dailys:\n summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.\n bomber_id, 'summary_date': str(summary_date)}\n if call.name and call.number:\n summary_dailys[call.bomber_id]['call_cnt'] += 1\n if call.promised_date:\n summary_dailys[call.bomber_id]['ptp_cnt'] += 1\n C1_sql = (\n \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,a.cycle\n from \n (select a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,2,4,5)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\"\n % (begin_time, end_time))\n C1_repayment = run_all_sql(C1_sql)\n other_sql = (\n \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,cycle\n from (\n select application_id,current_bomber_id,pay_amount,repay_at,cycle\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at,\n br.cycle\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (3,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n group by 1,4) b\n group by 1\n \"\"\"\n % (begin_time, end_time))\n other_repayment = run_all_sql(other_sql)\n all_repayment = C1_repayment + other_repayment\n for res in all_repayment:\n bomber_id, pay_amount, cycle = res\n if bomber_id in summary_dailys:\n summary_dailys[bomber_id]['repayment'] += pay_amount\n else:\n summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,\n 'cycle': cycle, 'repayment': pay_amount, 'bomber_id':\n bomber_id, 'summary_date': str(summary_date)}\n insert_values = list(summary_dailys.values())\n if insert_values:\n SummaryDaily.insert_many(insert_values).execute()\n\n\n<mask token>\n\n\ndef get_app_logs(apps):\n app_logs = {}\n all_np_apps = []\n all_p_apps = []\n for a in apps:\n latest_bomber = a['latest_bomber']\n latest_bomber = a['cycle'] if not latest_bomber else latest_bomber\n if latest_bomber in app_logs:\n app_logs[latest_bomber]['to_ids'].append(a['id'])\n else:\n app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':\n [a['id']], 'np_ids': [], 'p_ids': []}\n if a['promised_date'] and a['promised_date'].date() >= datetime.now(\n ).date():\n app_logs[latest_bomber]['p_ids'].append(a['id'])\n all_p_apps.append(a)\n else:\n app_logs[latest_bomber]['np_ids'].append(a['id'])\n all_np_apps.append(a)\n return app_logs, all_np_apps, all_p_apps\n\n\ndef month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):\n apps = list(apps)\n np_apps = list(np_apps)\n random.shuffle(np_apps)\n apps_len = len(apps)\n np_apps_len = len(np_apps)\n end = 0\n all_app_precentage = 0\n partners = 
Partner.select().where(Partner.cycle == cycle, Partner.\n status == PartnerStatus.NORMAL.value)\n for p in partners:\n all_app_precentage += p.app_percentage\n for partner in partners:\n bombers = Bomber.select().where(Bomber.partner == partner.id, \n Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.\n value)\n bids = {b.id: b for b in bombers}\n if len(bids) == 0:\n logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))\n continue\n start = end\n if np_apps_len >= int(apps_len * all_app_precentage):\n end = start + int(apps_len * partner.app_percentage)\n else:\n end = start + int(np_apps_len * partner.app_percentage /\n all_app_precentage)\n partner_app = np_apps[start:end]\n dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)\n np_apps = np_apps[end:]\n return np_apps\n\n\ndef month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):\n sys_cycle = {(1): 'AB_TEST_C1A', (2): 'AB_TEST_C1B', (3): 'AB_TEST_C2',\n (4): 'AB_TEST_C3'}\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0\n )\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n bombers = bombers.where(Bomber.instalment == 0)\n bids = {b.id: b for b in bombers}\n if cycle == Cycle.C1A.value:\n np_ids = [a['id'] for a in np_apps]\n np = Application.update(status=ApplicationStatus.PROCESSING.value,\n ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids\n ).execute()\n bomber_app_logs = app_logs.get(cycle, {})\n out_param = {'application_ids': bomber_app_logs.get('to_ids', []),\n 'month_dispatch': 1, 'src_bomber_id': cycle}\n new_out_record(**out_param)\n in_param = {'cycle': cycle, 'application_ids': np_ids,\n 'dest_bomber_id': cycle}\n new_in_record(**in_param)\n bomber_app_logs['need_num'] = len(np_apps)\n bomber_app_logs['form_ids'] = np_ids\n bomber_app_logs['status'] = 1\n else:\n dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)\n dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)\n\n\ndef dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,\n type=ApplicationType.CASH_LOAN.value):\n apps = list(apps)\n random.shuffle(apps)\n bids_list = list(bids.keys())\n if len(bids_list) <= 0:\n logging.info('get_dispatch_app_to_bomber no bids')\n return\n average_num = get_average_number(len(apps), len(bids_list))\n bomber_end = 0\n with db.atomic():\n for index, bid in enumerate(bids_list):\n current_bomber = bids.get(bid)\n bomber_app_logs = app_logs.get(bid, {})\n bomber_start = bomber_end\n bomber_end = bomber_start + average_num[index]\n bomber_apps = apps[bomber_start:bomber_end]\n from_p, from_np, from_ids, status = [], [], [], 0\n for ba in bomber_apps:\n promised_date = ba.get('promised_date')\n from_ids.append(ba['id'])\n if promised_date and promised_date.date() >= date.today():\n from_p.append(ba['id'])\n else:\n from_np.append(ba['id'])\n app_status = ApplicationStatus.AB_TEST.value\n if (cycle == Cycle.C1A.value and not out_partner and type ==\n ApplicationType.CASH_LOAN.value):\n app_status = ApplicationStatus.PROCESSING.value\n if from_p:\n p = Application.update(ptp_bomber=bid, latest_bomber=bid,\n status=app_status).where(Application.id << from_p).execute(\n )\n p_ids = bomber_app_logs.get('p_ids', []) + from_p\n bomber_app_logs['p_ids'] = p_ids\n if from_np:\n np = Application.update(latest_bomber=bid, ptp_bomber=None,\n status=ApplicationStatus.AB_TEST.value).where(\n 
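# non-promise (np) apps are reassigned and lose their ptp_bomber\n                    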
Application.id << from_np).execute()\n np_ids = bomber_app_logs.get('np_ids', []) + from_np\n bomber_app_logs['np_ids'] = np_ids\n in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.\n partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}\n if type == ApplicationType.CASH_LOAN.value:\n out_param = {'src_bomber_id': bid, 'application_ids':\n bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}\n new_out_record(**out_param)\n new_in_record(**in_param)\n else:\n out_and_in_record_instalment(**in_param)\n bomber_app_logs['status'] = 1\n need_num = bomber_app_logs.get('need_num', 0) + average_num[index]\n bomber_app_logs['need_num'] = need_num\n all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids\n bomber_app_logs['form_ids'] = all_form_ids\n if not out_partner:\n continue\n try:\n DispatchApp.delete().where(DispatchApp.application.in_(\n from_ids)).execute()\n dispatch_ins = [{'application': id, 'partner':\n current_bomber.partner_id, 'bomber': bid, 'status':\n DisAppStatus.NORMAL.value} for id in from_ids]\n DispatchApp.insert_many(dispatch_ins).execute()\n except Exception as e:\n logging.info(\n 'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str\n (e), bid, from_ids))\n\n\n<mask token>\n\n\ndef calc_entry_time(overdue_days):\n app_entry_time = {}\n overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],\n 'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}\n for key, value in overdue_entry.items():\n if value[0] <= overdue_days <= value[1]:\n app_entry_time[key] = datetime.now()\n else:\n app_entry_time[key] = None\n return app_entry_time\n\n\n<mask token>\n\n\n@action(MessageAction.SUMMARY_BOMBER_OVERDUE)\ndef summary_bomber_overdue_everyday(payload, msg_id):\n cycle_list = Cycle.values()\n which_day = date.today()\n for cycle in cycle_list:\n apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,\n ApplicationR.ptp_bomber, ApplicationR.overdue_days,\n ApplicationR.promised_date, ApplicationR.follow_up_date,\n ApplicationR.external_id, OverdueBillR.status, OverdueBillR.\n periods, OverdueBillR.sub_bill_id).join(OverdueBillR,\n JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id\n ).where(ApplicationR.status != ApplicationStatus.REPAID.value, \n ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()\n bomber_overdue_list = []\n for app in apps:\n status = app.get('status')\n if status == ApplicationStatus.REPAID.value:\n continue\n ptp_bomber = app.get('ptp_bomber')\n promised_date = app.get('promised_date')\n follow_up_date = app.get('follow_up_date')\n if not promised_date or promised_date.date() < date.today():\n ptp_bomber = promised_date = None\n if not follow_up_date or follow_up_date.date() < date.today():\n follow_up_date = None\n overdue_dict = {'collection_id': app.get('id'), 'external_id':\n app.get('external_id'), 'sub_bill_id': app.get(\n 'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app\n .get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':\n ptp_bomber, 'promised_date': promised_date,\n 'follow_up_date': follow_up_date, 'which_day': which_day,\n 'overdue_days': app.get('overdue_days')}\n bomber_overdue_list.append(overdue_dict)\n try:\n if bomber_overdue_list:\n with db.atomic():\n for index in range(0, len(bomber_overdue_list), 1000):\n insert_list = bomber_overdue_list[index:index + 1000]\n BomberOverdue.insert_many(insert_list).execute()\n except Exception as e:\n logging.info(\n 'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %\n (cycle, 
str(which_day), str(e)))\n\n\n<mask token>\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)\ndef today_ptp_auto_call_switch(payload, msg_id):\n today = datetime.today().date()\n next_day = today + timedelta(days=1)\n apps = ApplicationR.select(ApplicationR.latest_bomber).where(\n ApplicationR.promised_date < next_day, ApplicationR.promised_date >=\n today, ApplicationR.promised_date.is_null(False), ApplicationR.\n status != ApplicationStatus.REPAID.value, ApplicationR.cycle <\n Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(\n ApplicationR.latest_bomber)\n bids = [a.latest_bomber_id for a in apps]\n if not bids:\n return\n q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(\n BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids\n ).execute()\n\n\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)\ndef update_today_switch_every_day(payload, msg_id):\n q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(\n BomberPtp.auto_ext.is_null(False)).execute()\n\n\n<mask token>\n",
"step-5": "import traceback\nfrom functools import partial\nimport json\nimport logging\nfrom collections import defaultdict\nfrom itertools import cycle as CycleIter\nfrom datetime import datetime, date, timedelta\nfrom decimal import Decimal\nimport random\nfrom copy import deepcopy\nfrom math import ceil\n\nimport boto3\nimport bottle\nfrom peewee import fn, SQL, JOIN_LEFT_OUTER, JOIN_INNER, R\nfrom mongoengine import Q\nfrom deprecated.sphinx import deprecated\n\nfrom bomber.api import (\n AccountService,\n MessageService,\n AuditService,\n BillService,\n Dashboard,\n GoldenEye,\n Hyperloop,\n Message,\n Scout)\nfrom bomber.constant_mapping import (\n AutoCallMessageCycle,\n ApplicationStatus,\n RealRelationship,\n BomberCallSwitch,\n CallActionCommit,\n ApplicantSource,\n ApplicationType,\n EscalationType,\n ApprovalStatus,\n AutoListStatus,\n AutoCallResult,\n BeforeInBomber,\n PriorityStatus,\n InboxCategory,\n OldLoanStatus,\n BombingResult,\n ContactStatus,\n SpecialBomber,\n PartnerStatus,\n Relationship,\n ConnectType,\n SubRelation,\n PhoneStatus,\n ContactType,\n SmsChannel,\n ContainOut,\n FIRSTLOAN,\n AppName,\n RipeInd,\n Cycle,\n ContactsUseful,\n DisAppStatus,\n BomberStatus,\n PartnerType)\nfrom bomber.controllers.templates import cs_number_conf\nfrom bomber.controllers.report_calculation.collection_tool import (\n average_call_duration_team\n)\nfrom bomber.controllers.report_calculation.collection_agent import get_agent\nfrom bomber.db import db, readonly_db\nfrom bomber.models_readonly import (\n DispatchAppHistoryR,\n AutoCallActionsR,\n ConnectHistoryR,\n ApplicationR,\n CallActionsR,\n OverdueBillR,\n BomberR)\nfrom bomber.models import (\n ManualCallListStatus,\n RepaymentReportInto,\n OldLoanApplication,\n DispatchAppHistory,\n CompanyContactType,\n FamilyContactType,\n ReportCollection,\n RepaymentReport,\n AutoCallActions,\n DispatchAppLogs,\n ConnectHistory,\n BombingHistory,\n ManualCallList,\n AutoIVRActions,\n SummaryBomber,\n SummaryDaily,\n IVRCallStatus,\n BomberOverdue,\n AutoCallList,\n AutoIVRStatus,\n SystemConfig,\n RepaymentLog,\n IVRActionLog,\n TotalContact,\n Application,\n CallActions,\n DispatchApp,\n OverdueBill,\n Escalation,\n BomberPtp,\n WorkerLog,\n BomberLog,\n CycleList,\n Template,\n Transfer,\n Summary2,\n AutoIVR,\n Partner,\n Contact,\n CallLog,\n Summary,\n Bomber,\n Inbox,\n Role,\n SCI,\n)\nfrom bomber.sns import MessageAction, send_to_default_q\nfrom bomber.utils import (\n get_cycle_by_overdue_days,\n str_no_utc_datetime,\n no_utc_datetime,\n gender_ktpnum,\n list_to_dict,\n birth_dt_ktp,\n number_strip,\n utc_datetime,\n OperatedDict,\n average_gen,\n time_logger,\n idg,\n)\nfrom bomber.report_work import get_every_cycle_report\n\napp = bottle.default_app()\nclient = boto3.client('sqs')\n#对外展示dict,key-函数名;v-函数数组\nactions = {}\n\n\ndef action(msg_action):\n action_name = msg_action.value.lower()\n if action_name not in actions:\n actions[action_name] = []\n\n def wrapper(func):\n actions[action_name].append(func)\n return func\n return wrapper\n\n\n@action(MessageAction.BOMBER_HEALTH_CHECK)\ndef health_check(payload, msg_id):\n pass\n\n\ndef dpd1_classify(item, lst):\n app_name = str(item['app_name']).upper()\n key = '{}_{}_DPD1'.format(app_name, str(item['su']))\n if key in BeforeInBomber.keys():\n lst[key].append(item['id'])\n return lst\n\n\ndef dpd1_process(lst):\n \"\"\"已废弃的方法\"\"\"\n if not lst:\n return\n\n for key, l in lst.items():\n rule = getattr(BeforeInBomber, key).value\n query = (AutoIVRActions\n 
.select(fn.DISTINCT(AutoIVRActions.loanid))\n .where(AutoIVRActions.loanid.in_(l),\n AutoIVRActions.group.in_(rule.get('group')),\n AutoIVRActions.callstate\n .in_(IVRCallStatus.call_success())))\n success_set = {i.loanid for i in query}\n failed_list = list(set(l) - success_set)\n post_params = {\n '$and': rule.get('$and'),\n 'app_list': failed_list\n }\n resp = Hyperloop().post(\"/bomber/score/verify\", json=post_params)\n if not resp.ok:\n logging.error(\n 'hyperloop score verification failed: %s, %s',\n str(resp.status_code),\n str(resp.text)\n )\n logging.error('hyperloop score verification failed: %s',\n str(post_params))\n continue\n\n logging.debug('hyperloop score verification success: %s', resp.content)\n resp_json = resp.json()\n # dpd1 提前进入bomber\n app_list = resp_json['data']\n if not app_list:\n continue\n for item in app_list:\n # 做ab_test,三分之一的人提前入催\n if random.randint(0, 5) == 1:\n send_to_default_q(\n MessageAction.APPLICATION_BOMBER,\n {'id': int(item)}\n )\n\n\n# auto_ivr,自动外呼系统\n@action(MessageAction.GET_IVR)\ndef get_ivr(payload, msg_id):\n logging.warning('start get_ivr')\n sys_config = (SystemConfig.select()\n .where(SystemConfig.key == 'DPD1-3_INTO_IVR')\n .first())\n # 得到所有的lid\n now = date.today()\n # 预期用户不再使用ivr,而是直接进入催收,故修改时间窗口不再获取预期数据\n if sys_config and sys_config.value:\n start = now - timedelta(days=3)\n else:\n start = now\n end = now + timedelta(days=4)\n # TODO: 使用redis\n item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()\n if not item:\n # 开始时清空ivr数据\n AutoIVR.delete().execute()\n current_page = 0\n elif item.current_page >= item.total_page:\n return\n else:\n current_page = item.current_page\n\n #逾期分组 appname + 逾期次数 + 逾期天数\n auto_ivr = {\n 'DanaCepat01': 1,\n 'DanaCepat00': 2,\n 'DanaCepat0PDP1': 3,\n 'PinjamUang01': 4,\n 'PinjamUang00': 5,\n 'PinjamUang0PDP1': 6,\n 'KtaKilat01': 7,\n 'KtaKilat00': 8,\n 'KtaKilat0PDP1': 9,\n 'DanaCepat11': 10,\n 'DanaCepat10': 11,\n 'DanaCepat1PDP1': 12,\n 'PinjamUang11': 13,\n 'PinjamUang10': 14,\n 'PinjamUang1PDP1': 15,\n 'KtaKilat11': 16,\n 'KtaKilat10': 17,\n 'KtaKilat1PDP1': 18,\n 'DanaCepat0PDP2': 19,\n 'DanaCepat0PDP3': 20,\n 'DanaCepat03': 21,\n 'PinjamUang0PDP2': 22,\n 'PinjamUang0PDP3': 23,\n 'PinjamUang03': 24,\n 'KtaKilat0PDP2': 25,\n 'KtaKilat0PDP3': 26,\n 'KtaKilat03': 27,\n 'DanaCepat1PDP2': 28,\n 'DanaCepat1PDP3': 29,\n 'PinjamUang1PDP2': 30,\n 'PinjamUang1PDP3': 31,\n 'KtaKilat1PDP2': 32,\n 'KtaKilat1PDP3': 33,\n 'DanaCepat13': 36,\n 'PinjamUang13': 37,\n 'KtaKilat13': 38,\n 'DanaCepat12': 39,\n 'PinjamUang12': 40,\n 'KtaKilat12': 41,\n 'DanaCepat02': 42,\n 'PinjamUang02': 43,\n 'KtaKilat02': 44,\n 'IKIDana01': 100,\n 'IKIDana00': 101,\n 'IKIDana0PDP1': 102,\n 'IKIDana11': 103,\n 'IKIDana10': 104,\n 'IKIDana1PDP1': 105,\n 'IKIDana0PDP2': 106,\n 'IKIDana0PDP3': 107,\n 'IKIDana03': 108,\n 'IKIDana1PDP2': 109,\n 'IKIDana1PDP3': 110,\n 'IKIDana13': 111,\n 'IKIDana12': 112,\n 'IKIDana02': 113,\n }\n current_page += 1\n with db.atomic() as transaction:\n while True:\n bill_service = BillService()\n #获取当天到未来4天的到期bill_sub.origin_due_at\n ivr_action = bill_service.ivr_pages(\n page=current_page,\n page_size=500,\n start_time=utc_datetime(str(start)),\n end_time=utc_datetime(str(end)))\n result = ivr_action['result']\n page_size = int(ivr_action.get('page_size', 0))\n total_page = int(ivr_action.get('total_page', 0))\n\n insert_args = []\n for a in result:\n due_at = no_utc_datetime(a['due_at'])\n days = (due_at.date() - now).days\n if days == 2:\n continue\n if days > 0:\n time = str(days)\n 
else:\n                    # the time window above already excludes bills that are exactly two days overdue\n                    time = str(days).replace('-', 'PDP')\n\n                # su: number of times this user has been overdue\n                key = a['app_name'] + str(a['su']) + time\n                group = auto_ivr.get(key)\n\n                user_id = a['user_id']\n                try:\n                    user_resp = (AccountService()\n                                 .get_user(path_params={'user_id': user_id}))\n                    if str(user_resp['mobile_no']) == str(a['user_mobile_no']):\n                        numbers = a['user_mobile_no']\n                    else:\n                        numbers = (a['user_mobile_no'] +\n                                   ',' + user_resp.get('mobile_no'))\n                except:\n                    logging.error('Request Account Service Error.')\n                    numbers = a['user_mobile_no']\n\n                insert_args.append({\n                    'application_id': a['id'],\n                    'numbers': numbers,\n                    'group': group,\n                    'user_id': user_id})\n\n            AutoIVR.insert_many(insert_args).execute()\n            if current_page == 1:\n                IVRActionLog.create(total_page=total_page,\n                                    proc_date=now,\n                                    page_size=page_size,\n                                    current_page=current_page)\n                # for some reason create() does not return the new row here, so fetch it again\n                item = IVRActionLog.get(IVRActionLog.proc_date == now)\n            else:\n                item.current_page = current_page\n                item.page_size = page_size\n                item.total_page = total_page\n                item.save()\n            transaction.commit()\n            current_page += 1\n            if current_page > int(total_page):\n                break\n    # try:\n    #     ivr_t2_test()\n    # except Exception as e:\n    #     logging.error(\"ivr_test_error:%s\"%str(e))\n\n    if sys_config and sys_config.value:\n        try:\n            classfiy_dpd_ptp_apps()\n        except Exception as e:\n            logging.error(\"dpd1-3_test_error:%s\"%str(e))\n\n\n\n# test code for routing t-2 bills into IVR\ndef ivr_t2_test():\n    t2_groups = [39, 40, 41, 42, 43, 44]\n    ivr_test_proportion = 0.2\n    sys_config = (SystemConfig.select()\n                  .where(SystemConfig.key == 'IVR_TEST_PROPORTION')\n                  .first())\n    if sys_config and sys_config.value:\n        ivr_test_proportion = float(sys_config.value)\n    # fetch all t-2 bills\n    t2_ivrs = (AutoIVR.select()\n               .where(AutoIVR.group << t2_groups,\n                      AutoIVR.status == AutoIVRStatus.AVAILABLE.value))\n    t2_dict = defaultdict(list)\n    # sample a fixed proportion of bills from every group\n    for ivr in t2_ivrs:\n        t2_dict[ivr.group].append(ivr.id)\n    test_ivr_ids = []\n    for group, ivr_ids in t2_dict.items():\n        number = ceil(len(ivr_ids) * ivr_test_proportion)\n        test_ivr_ids += ivr_ids[:number]\n    if not test_ivr_ids:\n        return\n    # update the IVR status\n    q = (AutoIVR.update(status=AutoIVRStatus.SUCCESS.value)\n         .where(AutoIVR.group << t2_groups,\n                AutoIVR.id.not_in(test_ivr_ids))\n         .execute())\n\n# filter out bills that already made a promise (PTP) in bomber\ndef classfiy_dpd_ptp_apps():\n    dpd_group = AutoIVR.dpd_groups()\n    dpd1_3_ivr_pro = 0.2\n    sys_config = (SystemConfig.select()\n                  .where(SystemConfig.key == 'DPD1-3_IVR_TEST')\n                  .first())\n    if sys_config and sys_config.value:\n        dpd1_3_ivr_pro = float(sys_config.value)\n    # fetch bills that already carry a promise\n    apps = (ApplicationR.select(ApplicationR.external_id)\n            .where(ApplicationR.overdue_days < 4,\n                   ApplicationR.status != ApplicationStatus.REPAID.value,\n                   ApplicationR.promised_date >= date.today(),\n                   ApplicationR.promised_date.is_null(False)))\n    apps_ids = [a.external_id for a in apps]\n    # remove promised bills from the IVR queue\n    if apps_ids:\n        d = (AutoIVR.delete()\n             .where(AutoIVR.application_id.in_(apps_ids),\n                    AutoIVR.group.in_(dpd_group))\n             .execute())\n    # all dpd1-3 bills\n    ivrs = (AutoIVR.select().where(AutoIVR.group.in_(dpd_group)))\n    ivrs_dict = defaultdict(list)\n    for ivr in ivrs:\n        ivrs_dict[ivr.group].append(ivr.id)\n    test_ivrs = []\n    for group, ivr_ids in ivrs_dict.items():\n        number = ceil(len(ivr_ids) * dpd1_3_ivr_pro)\n        test_ivrs += ivr_ids[:number]\n    if not test_ivrs:\n        return\n    # mark the rows that are not in the test sample\n    q = (AutoIVR.update(status=AutoIVRStatus.SUCCESS.value)\n         .where(AutoIVR.group.in_(dpd_group),\n                AutoIVR.id.not_in(test_ivrs))\n         .execute())\n\n\n\n# special handling for APP merge\n@action(MessageAction.APP_MERGE)\n@deprecated(version='1.0', reason='This function will be removed soon')\ndef app_merge(payload, msg_id):\n\n    # pull apps whose DPD has not reached 4 into bomber early\n    sql = \"\"\"\n        select *\n        from (\n            select a.id as id\n            from dashboard.application as a\n            inner join repayment.bill2 as b on b.external_id = a.id\n            where not exists (\n                select 1\n                from battlefront.user_login_log as u\n                where u.created_at > '2018-08-16'\n                and u.user_id = a.user_id\n            )\n            and a.app = 'DanaCepat'\n            and a.is_first_loan = 1\n            and a.apply_at < '2018-08-23 20:50:00'\n            and b.overdue_days between 1 and 3\n            and b.status != 2) result\n        where not exists (\n            select 1\n            from bomber.application as a\n            where a.cycle = 1\n            and a.status = 4\n            and a.id = result.id\n        )\n    \"\"\"\n    cursor = readonly_db.get_cursor()\n    cursor.execute(sql)\n    new_data = cursor.fetchall()\n    cursor.close()\n    if new_data:\n        bomber = [103, 104]\n        for d in new_data:\n            app_id = {'id': d[0]}\n            application_overdue(app_id, None)\n\n            # randomly assign the newly entered apps to the designated collectors\n            (Application\n             .update(status=ApplicationStatus.AB_TEST.value,\n                     latest_bomber=random.choice(bomber),\n                     ptp_bomber=None\n                     )\n             .where(Application.id == d[0])\n             ).execute()\n        logging.warning('add new app success')\n\n    # users who logged in again and have no PTP are removed from manual collection\n    ptp = date.today() - timedelta(days=1)\n    del_sql = \"\"\"\n        select a.id\n        from bomber.application as a\n        where exists(\n            select 1\n            from battlefront.user_login_log as u\n            where u.created_at > '2018-08-16'\n            and u.user_id = a.user_id\n        )\n        and a.cycle = 1\n        and a.status = 4\n        and (a.promised_date is null or a.promised_date < \"%s\")\n    \"\"\" % ptp\n    cursor = readonly_db.get_cursor()\n    cursor.execute(del_sql)\n    del_date = cursor.fetchall()\n    cursor.close()\n    if not del_date:\n        return\n    ids = list()\n    for d in del_date:\n        ids.append(d[0])\n    (Application\n     .update(status=ApplicationStatus.UNCLAIMED.value,\n             latest_bomber=None)\n     .where(Application.id << ids)).execute()\n\n\n@action(MessageAction.APPLICATION_BOMBER)\ndef application_overdue(payload, msg_id):\n\n    application_id = payload['id']\n    sub_bill_id = payload['bill_sub_id']\n    local_app = (Application.select()\n                 .where(Application.external_id == application_id)\n                 .order_by(Application.finished_at)\n                 .first())\n    # single-period loan and the collection case already exists\n    if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:\n        logging.info('application %s overdue, already exists', application_id)\n        add_contact(local_app)\n        return\n    # instalment loan: check whether the sub-bill already exists\n    if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:\n        overdue_bill = (OverdueBillR.select()\n                        .where(OverdueBillR.sub_bill_id == sub_bill_id,\n                               OverdueBillR.external_id == application_id))\n        if overdue_bill.exists():\n            logging.info(\n                'application %s,sub_bill_id %s overdue, already exists' %\n                (application_id, sub_bill_id))\n            return\n\n    try:\n        sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])\n        sub_bill = sub_bill[0]\n    except Exception:\n        logging.error('application %s overdue, get sub_bill info failed:'\n                      'Request To repayment Error', application_id)\n        return\n\n    if sub_bill['status'] == 2:\n        logging.error('application %s overdue, but bills already cleared',\n                      application_id)\n        return\n\n    overdue_days = sub_bill.get('overdue_days', 0)\n    if overdue_days == 0:\n        logging.info('application {} no overdue'\n                     .format(str(application_id)))\n        return\n\n    gold_eye = GoldenEye().get('/applications/%s' % application_id)\n    if not gold_eye.ok:\n        logging.error('get application %s failed: Request to GoldenEye.',\n                      application_id)\n        return\n    gold_app = 
gold_eye.json().get('data')\n user_id = gold_app['user_id']\n\n apply_history = Dashboard().get('/users/%s/apply-history' % user_id)\n if not apply_history.ok:\n logging.error('get user %s apply history failed: Request '\n 'to Dashboard Failed.', user_id)\n return\n history = apply_history.json().get('data')\n loan_success_times = len([1 for i in history\n if i['status'] in [80, 90, 100, 70] and\n i['id'] != gold_app['id']])\n\n id = application_id\n type = ApplicationType.CASH_LOAN.value\n bill_id = sub_bill.get(\"bill_id\")\n amount = sub_bill.get(\"amount\")\n amount_net = sub_bill.get('amount_net')\n interest_rate = sub_bill.get('interest_rate')\n overdue_days = sub_bill.get('overdue_days')\n origin_due_at = sub_bill.get('origin_due_at')\n sub_overdue_bill = {\n \"collection_id\": id,\n \"bill_id\": bill_id,\n \"sub_bill_id\": sub_bill_id,\n \"periods\": sub_bill.get(\"periods\"),\n \"overdue_days\": overdue_days,\n \"origin_due_at\": origin_due_at,\n \"amount\": amount,\n \"amount_net\": amount_net,\n \"interest_rate\": interest_rate,\n \"external_id\": application_id\n }\n # 根据催收单类型来生成id\n if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:\n if local_app and local_app.status != ApplicationStatus.REPAID.value:\n sub_overdue_bill[\"collection_id\"] = local_app.id\n local_app.amount += amount\n local_app.amount_net += amount_net\n local_app.save()\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n logging.info(\n \"application %s,sub_bill_id:%s overdue created\" %\n (application_id, sub_bill_id))\n return\n else:\n id = idg()\n type = ApplicationType.CASH_LOAN_STAGING.value\n sub_overdue_bill[\"collection_id\"] = id\n\n ptp_info = BombingHistory.filter(BombingHistory.application == id).first()\n\n promised_amount = ptp_info and ptp_info.promised_amount\n promised_date = ptp_info and ptp_info.promised_date\n\n application = Application.create(\n id=id,\n user_id=gold_app['user_id'],\n user_mobile_no=gold_app['user_mobile_no'],\n user_name=gold_app['id_name'],\n app=gold_app['app'],\n device_no=gold_app['device_no'],\n contact=json.dumps(gold_app.get('contact')),\n apply_at=gold_app.get('apply_date'),\n\n id_ektp=gold_app.get('id_ektp'),\n birth_date=birth_dt_ktp(gold_app.get('id_ektp')),\n gender=gender_ktpnum(gold_app.get('id_ektp')),\n\n profile_province=(gold_app.get('profile_province') or {}).get('name'),\n profile_city=(gold_app.get('profile_city') or {}).get('name'),\n profile_district=(gold_app.get('profile_district') or {}).get('name'),\n profile_residence_time=gold_app.get('profile_residence_time'),\n profile_residence_type=gold_app.get('profile_residence_type'),\n profile_address=gold_app.get('profile_address'),\n profile_education=gold_app.get('profile_education'),\n profile_college=(gold_app.get('profile_college') or {}).get('name'),\n\n job_name=gold_app.get('job_name'),\n job_tel=gold_app.get('job_tel'),\n job_bpjs=gold_app.get('job_bpjs'),\n job_user_email=gold_app.get('job_user_email'),\n job_type=gold_app.get('job_type'),\n job_industry=gold_app.get('job_industry'),\n job_department=gold_app.get('job_department'),\n job_province=(gold_app.get('job_province') or {}).get('name'),\n job_city=(gold_app.get('job_city') or {}).get('name'),\n job_district=(gold_app.get('job_district') or {}).get('name'),\n job_address=gold_app.get('job_address'),\n\n amount=amount,\n amount_net=amount_net,\n interest_rate=interest_rate,\n # late_fee_rate=bill.get('late_fee_rate'),\n # late_fee_initial=late_fee_initial,\n # late_fee=late_fee,\n # interest=interest,\n 
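# NOTE: the commented-out fields above came from a legacy `bill`\n            # payload; this code path takes the amounts from `sub_bill` instead.\n            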
term=gold_app.get('term'),\n origin_due_at=origin_due_at,\n # due_at=bill.get('due_at'),\n overdue_days=overdue_days,\n\n repay_at=sub_bill.get('repay_at'),\n # principal_paid=principal_paid,\n # late_fee_paid=late_fee_paid,\n # repaid=repaid,\n # unpaid=unpaid,\n\n loan_success_times=loan_success_times,\n arrived_at=datetime.now(),\n follow_up_date=datetime.now(),\n\n promised_amount=promised_amount,\n promised_date=promised_date,\n external_id=application_id,\n type=type,\n bill_id=bill_id,\n dpd1_entry=datetime.now()\n )\n\n new_overdue = OverdueBill.create(**sub_overdue_bill)\n\n logging.info('overdue application %s created', application_id)\n\n # new overdue application equals to 'escalate from 0 to 1'\n Escalation.create(\n application=id,\n type=EscalationType.AUTOMATIC.value,\n status=ApprovalStatus.APPROVED.value,\n current_cycle=0,\n escalate_to=1,\n )\n add_contact(application)\n\n\ndef add_contact(application):\n\n logging.info('start add contact for application: %s', application.id)\n\n # 添加联系人信息\n contacts = Contact.filter(\n Contact.user_id == application.user_id,\n )\n existing_numbers = {contact.number for contact in contacts}\n\n insert_contacts = list()\n\n mon_insert_contact = {}\n # applicant\n user_mobile_no = number_strip(application.user_mobile_no)\n if user_mobile_no and user_mobile_no not in existing_numbers:\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': application.user_name,\n 'number': user_mobile_no,\n 'relationship': Relationship.APPLICANT.value,\n 'source': 'apply info',\n 'real_relationship': Relationship.APPLICANT.value\n })\n existing_numbers.add(number_strip(application.user_mobile_no))\n\n extra_phone = GoldenEye().get(\n '/users/%s/extra-phone' % application.user_id\n )\n if not extra_phone.ok:\n extra_phone = []\n logging.error('get user %s extra contacts failed',\n application.user_id)\n else:\n extra_phone = extra_phone.json()['data']\n\n if extra_phone:\n for i in extra_phone:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': application.user_name,\n 'number': number,\n 'relationship': Relationship.APPLICANT.value,\n 'source': 'extra phone',\n 'real_relationship': Relationship.APPLICANT.value\n })\n key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n\n # family\n # ec contact\n ec_contact = []\n contact = json.loads(application.contact or '[]')\n for i in contact:\n if (number_strip(i['mobile_no']) not in existing_numbers and\n number_strip(i['mobile_no'])):\n ec_contact.append({\n 'user_id': application.user_id,\n 'name': i['name'],\n 'number': number_strip(i['mobile_no']),\n 'relationship': Relationship.FAMILY.value,\n 'sub_relation': SubRelation.EC.value,\n 'source': FamilyContactType.EC.value,\n 'real_relationship': Relationship.FAMILY.value\n })\n key = (user_mobile_no,\n number_strip(i['mobile_no']),\n ContactType.F_EC.value)\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['mobile_no']))\n if i['type'] != 1:\n continue\n if (number_strip(i['tel_no']) not in existing_numbers and\n number_strip(i['tel_no'])):\n ec_contact.append({\n 'user_id': application.user_id,\n 'name': i['name'],\n 'number': number_strip(i['tel_no']),\n 'relationship': Relationship.FAMILY.value,\n 'sub_relation': SubRelation.EC.value,\n 'source': FamilyContactType.EC.value,\n 
'real_relationship': Relationship.FAMILY.value\n })\n key = (user_mobile_no,\n number_strip(i['tel_no']),\n ContactType.F_EC.value)\n mon_insert_contact[key] = 1, 0, i['name']\n existing_numbers.add(number_strip(i['tel_no']))\n\n if ec_contact:\n Contact.insert_many(ec_contact).execute()\n\n # company\n if all((application.job_tel,\n number_strip(application.job_tel),\n number_strip(application.job_tel) not in existing_numbers)):\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': None,\n 'number': number_strip(application.job_tel),\n 'relationship': Relationship.COMPANY.value,\n 'source': 'basic info job_tel',\n 'real_relationship': Relationship.COMPANY.value\n })\n key = (user_mobile_no,\n number_strip(application.job_tel),\n ContactType.C_BASIC_INFO_JOB_TEL.value)\n mon_insert_contact[key] = 1, 0, None\n existing_numbers.add(number_strip(application.job_tel))\n\n # suggested\n\n sms_contacts = GoldenEye().get(\n '/applications/%s/sms-contacts' % application.external_id\n )\n if not sms_contacts.ok:\n sms_contacts = []\n logging.info('get user %s sms contacts failed', application.external_id)\n else:\n sms_contacts = sms_contacts.json()['data']\n\n if sms_contacts:\n for i in sms_contacts:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': i['name'][:128],\n 'number': number,\n 'relationship': Relationship.SUGGESTED.value,\n 'source': 'sms contacts',\n 'real_relationship': Relationship.SUGGESTED.value\n })\n key = (user_mobile_no,\n number,\n ContactType.S_SMS_CONTACTS.value)\n mon_insert_contact[key] = 1, 0, i['name'][:128]\n existing_numbers.add(number)\n\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n\n cf = GoldenEye().get(\n '/applications/%s/call/frequency' % application.external_id\n )\n if not cf.ok:\n call_frequency = []\n logging.error('get application %s call frequency error',\n application.external_id)\n else:\n call_frequency = cf.json()['data']\n\n # 结构不一样,重新生成\n insert_contacts = []\n fm = GoldenEye().get(\n '/applications/%s/contact/family-member' % application.external_id\n )\n if not fm.ok:\n family = []\n logging.error('get application %s family-member info error',\n application.external_id)\n else:\n family = fm.json()['data']\n if family:\n for i in family:\n if not (i.get('number')):\n logging.info('family member %s' % str(i))\n continue\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n continue\n logging.info('family members: %s' % str(i))\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': i['name'][:128],\n 'number': number,\n 'relationship': Relationship.FAMILY.value,\n 'source': FamilyContactType.CALLEC.value,\n 'total_count': i.get('total_count', 1),\n 'total_duration': i.get('total_duration', 0),\n 'real_relationship': Relationship.FAMILY.value\n })\n key = user_mobile_no, number, ContactType.F_CALL_EC.value\n mon_insert_contact[key] = (i.get('total_count', 1),\n i.get('total_duration', 0),\n i['name'][:128])\n existing_numbers.add(number)\n\n mon_update_contact = {}\n if call_frequency:\n with db.atomic():\n count = 1\n for i in call_frequency:\n number = number_strip(i['number'])[:64]\n if not number:\n continue\n if number in existing_numbers:\n (Contact\n .update(total_count=i['total_count'],\n total_duration=i['total_duration'])\n .where(Contact.number == number,\n Contact.user_id == application.user_id))\n 
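# NOTE: the update above only builds a lazy peewee query and never\n                    # runs it; executing an equivalent update here is a minimal\n                    # sketch of the likely intended behavior.\n                    (Contact\n                     .update(total_count=i['total_count'],\n                             total_duration=i['total_duration'])\n                     .where(Contact.number == number,\n                            Contact.user_id == application.user_id)\n                     .execute())\n                    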
key = user_mobile_no, number\n mon_update_contact[key] = (i['total_count'],\n i['total_duration'])\n continue\n\n # 设置通话频率最多的五个为family member\n if count < 6:\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': i['name'][:128],\n 'number': number,\n 'relationship': Relationship.FAMILY.value,\n 'total_count': i['total_count'],\n 'total_duration': i['total_duration'],\n 'source': FamilyContactType.CALLTOP5.value,\n 'real_relationship': Relationship.FAMILY.value\n })\n count += 1\n key = user_mobile_no, number, ContactType.F_CALL_TOP5.value\n mon_insert_contact[key] = (i['total_count'],\n i['total_duration'],\n i['name'][:128])\n else:\n insert_contacts.append({\n 'user_id': application.user_id,\n 'name': i['name'][:128],\n 'number': number,\n 'relationship': Relationship.SUGGESTED.value,\n 'total_count': i['total_count'],\n 'total_duration': i['total_duration'],\n 'source': 'call frequency',\n 'real_relationship': Relationship.SUGGESTED.value\n })\n key = (user_mobile_no,\n number,\n ContactType.S_CALL_FREQUENCY.value)\n mon_insert_contact[key] = (i['total_count'],\n i['total_duration'],\n i['name'][:128])\n\n existing_numbers.add(number)\n if insert_contacts:\n Contact.insert_many(insert_contacts).execute()\n\n # 信用认证号码加入到本人\n next_apply_list = (AccountService().add_contact(application.user_id))\n\n for next_apply in next_apply_list:\n number = number_strip(str(next_apply))[:64]\n if number and number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=application.user_name,\n number=number,\n relationship=Relationship.SUGGESTED.value,\n source='online profile phone',\n real_relationship=Relationship.SUGGESTED.value\n )\n key = (user_mobile_no,\n number,\n ContactType.S_ONLINE_PROFILE_PHONE.value)\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n\n # 双卡手机另一个号码加入到本人队列\n next_applicant = GoldenEye().get(\n '/bomber/%s/dual_contact' % application.user_id\n )\n if not next_applicant.ok:\n next_applicant = []\n logging.error('get user %s dual_contact contacts failed'\n % application.user_id)\n else:\n next_applicant = next_applicant.json()['data']\n\n if next_applicant:\n for i in next_applicant:\n number = number_strip(str(i))[:64]\n if number and number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=application.user_name,\n number=number,\n relationship=Relationship.APPLICANT.value,\n source='apply info',\n real_relationship=Relationship.APPLICANT.value\n )\n key = user_mobile_no, number, ContactType.A_APPLY_INFO.value\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success' %\n application.user_id)\n\n # add new contact\n # 将同个ktp注册的多个号码添加到本人\n numbers = []\n try:\n numbers = (AccountService()\n .ktp_number(path_params={'user_id': application.user_id}))\n except Exception as e:\n logging.info('request ktp numbers failed %s' % str(e))\n\n for n in numbers:\n number = number_strip(str(n))[:64]\n if number and number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=application.user_name,\n number=number,\n relationship=Relationship.APPLICANT.value,\n source='ktp number',\n real_relationship=Relationship.APPLICANT.value\n )\n key = (user_mobile_no,\n number,\n ContactType.A_KTP_NUMBER.value)\n mon_insert_contact[key] = 1, 0, application.user_name\n existing_numbers.add(number)\n logging.info('get user %s dual_contact contacts success'\n % 
application.user_id)\n\n # 将contact表中is_family为true的标记为ec\n try:\n ecs = GoldenEye().get(\n '/applications/%s/contact/ec' % application.external_id\n )\n except Exception as e:\n logging.info('request ec-member error: %s' % str(e))\n try:\n if not ecs.ok:\n ec = []\n logging.info('get application %s ec-member info error',\n application.external_id)\n else:\n ec = ecs.json()['data']\n\n if ec:\n for e in ec:\n number = number_strip(e['numbers'])[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=e['name'][:128],\n number=number,\n relationship=Relationship.FAMILY.value,\n source=FamilyContactType.CONTACTEC.value,\n real_relationship=Relationship.FAMILY.value\n )\n key = (user_mobile_no,\n number,\n ContactType.F_CONTACT_EC.value)\n mon_insert_contact[key] = 1, 0, e['name'][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add ec_member error:%s' % str(e))\n\n # 将contact中is_me标记为true的标记为本人\n try:\n mn = GoldenEye().get(\n '/applications/%s/contact/my_number' % application.external_id\n )\n except Exception as e:\n logging.info('request my_number error: %s' % str(e))\n try:\n if not mn.ok:\n my = []\n logging.info('get application %s my_number info error',\n application.external_id)\n else:\n my = mn.json()['data']\n\n if my:\n for m in my:\n number = number_strip(m)[:64]\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=my[m][:128],\n number=number,\n relationship=Relationship.SUGGESTED.value,\n source='my number',\n real_relationship=Relationship.SUGGESTED.value\n )\n key = user_mobile_no, number, ContactType.S_MY_NUMBER.value\n mon_insert_contact[key] = 1, 0, my[m][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add my_member error:%s' % str(e))\n\n # 得到company的号码\n try:\n cn = GoldenEye().get(\n '/applications/%s/contact/company-number' % application.external_id\n )\n except Exception as e:\n logging.info('request company-number error: %s' % str(e))\n try:\n if not cn.ok:\n cn = []\n logging.info('get application %s company_number info error',\n application.external_id)\n else:\n cn = cn.json()['data']\n\n if cn:\n for c in cn:\n number = c\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=cn[c][:128],\n number=number,\n relationship=Relationship.COMPANY.value,\n source='company',\n real_relationship=Relationship.COMPANY.value\n )\n key = user_mobile_no, number, ContactType.C_COMPANY.value\n mon_insert_contact[key] = 1, 0, cn[c][:128]\n existing_numbers.add(number)\n except Exception as e:\n logging.info('add company_member error:%s' % str(e))\n\n # 得到本人在其他设备上登陆的sim联系方式,加入applicant中\n try:\n ol = (AccountService()\n .other_login_contact(userId=application.user_id))\n except Exception as e:\n logging.error('request other_login error: %s' % e)\n ol = {}\n\n try:\n for o in ol:\n number = number_strip(o)\n if not number:\n continue\n if number not in existing_numbers:\n Contact.create(\n user_id=application.user_id,\n name=ol[o][:128],\n number=number,\n relationship=Relationship.SUGGESTED.value,\n source='other_login',\n real_relationship=Relationship.SUGGESTED.value\n )\n key = (user_mobile_no,\n number,\n ContactType.S_OTHER_LOGIN.value)\n mon_insert_contact[key] = 1, 0, ol[o][:128]\n except Exception as e:\n logging.error('add other_login number error:%s' % e)\n\n logging.info('add contact for application %s 
finished', application.id)\n if mon_insert_contact or mon_update_contact:\n send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON,\n {\n 'user_mobile_no': user_mobile_no,\n 'insert_contact': str(mon_insert_contact),\n 'update_contact': str(mon_update_contact),\n 'user_id': application.user_id,\n 'name': application.user_name\n })\n\n\n@action(MessageAction.IMPORT_CONTACT_TO_MON)\ndef import_contact_to_mon(payload, msg_id):\n user_mobile_no = payload['user_mobile_no']\n insert_contact = eval(payload['insert_contact'])\n update_contact = eval(payload['update_contact'])\n user_id = payload['user_id']\n name = payload['name']\n\n if not (insert_contact or update_contact or user_mobile_no):\n logging.error(\"Invalid params\")\n drop_duplicated_contact({'numbers': [user_mobile_no]}, None)\n send_to_default_q(MessageAction.CONTACT_FROM_TOTAL, {\n 'number': user_mobile_no,\n 'user_id': user_id\n })\n return\n\n contacts = TotalContact.objects(src_number=user_mobile_no, is_calc=False)\n insert_list = []\n for c in contacts:\n key = (user_mobile_no, c.dest_number, c.source)\n if key in insert_contact:\n insert_contact.pop(key)\n\n for (sn, dn, s), (tc, td, na) in insert_contact.items():\n insert_list.append({\n 'src_number': sn,\n 'src_name': name,\n 'dest_number': dn,\n 'dest_name': na,\n 'source': s,\n 'total_count': tc,\n 'total_duration': td\n })\n\n if insert_list:\n insert_count = len((TotalContact\n .objects\n .insert([TotalContact(**dct)\n for dct in insert_list])))\n logging.info(\"insert success %s\", insert_count)\n\n update_count = 0\n for (sn, dn), (tc, td) in update_contact.items():\n result = (TotalContact\n .objects(src_number=sn, dest_number=dn, is_calc=False)\n .update(total_count=tc, total_duration=td))\n if result:\n update_count += 1\n logging.info(\"update success %s\", update_count)\n\n drop_duplicated_contact({'numbers': [user_mobile_no]}, None)\n send_to_default_q(MessageAction.CONTACT_FROM_TOTAL, {\n 'number': user_mobile_no,\n 'user_id': user_id\n })\n\n\n@action(MessageAction.DROP_DUPLICATED_CONTACT)\ndef drop_duplicated_contact(payload, msg_id):\n \"\"\"\n total_count,total_duration去重时,先total_count, 后total_duration\n\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n numbers = payload.get('numbers', [])\n if not numbers:\n logging.error(\"no numbers should drop\")\n\n query = (TotalContact\n .objects(Q(src_number__in=numbers) | Q(dest_number__in=numbers)))\n\n contact_list = defaultdict(list)\n delete_list = []\n insert_list = []\n for c in query:\n if c.src_number == c.dest_number:\n delete_list.append(c.id)\n\n key = c.src_number, c.dest_number, c.source\n contact_list[key].append({\n 'id': c.id,\n 'src_number': c.src_number,\n 'dest_number': c.dest_number,\n 'total_count': c.total_count,\n 'total_duration': c.total_duration,\n 'is_calc': c.is_calc,\n 'source': c.source,\n 'src_name': c.src_name,\n 'dest_name': c.dest_name\n })\n\n contact_list2 = deepcopy(contact_list)\n for key, info in contact_list.items():\n _info = sorted(info,\n key=lambda x: (not x['is_calc'],\n x['total_count'],\n x['total_duration']),\n reverse=True)\n rs = _info[0]\n if not rs['is_calc']:\n contact_list2[(key[1], key[0], key[2])].append({\n 'src_number': rs['dest_number'],\n 'dest_number': rs['src_number'],\n 'total_count': rs['total_count'],\n 'total_duration': rs['total_duration'],\n 'is_calc': True,\n 'source': rs['source'],\n 'id': '',\n 'src_name': rs['dest_name'],\n 'dest_name': rs['src_name']\n })\n delete_ids = [i['id'] for i in _info[1:] if i['id']]\n 
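# the lower-ranked duplicates gathered above are queued for deletion,\n            # keeping only the best-ranked record per (src, dest, source) key\n            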
delete_list.extend(delete_ids)\n\n for key, info in contact_list2.items():\n _info = sorted(info,\n key=lambda x: (not x['is_calc'],\n x['total_count'],\n x['total_duration']),\n reverse=True)\n rs = _info[0]\n # 第一轮已经把不是反转的号码全部刷过\n if not rs['is_calc']:\n continue\n if not rs['id']:\n rs.pop('id')\n insert_list.append(rs)\n\n delete_ids = [i['id'] for i in _info[1:] if i['id']]\n delete_list.extend(delete_ids)\n\n if delete_list:\n delete_count = TotalContact.objects(id__in=delete_list).delete()\n logging.info(\"numbers %s: delete success %s\", numbers, delete_count)\n\n if insert_list:\n insert_count = len((TotalContact\n .objects\n .insert([TotalContact(**dct)\n for dct in insert_list])))\n logging.info(\"numbers %s: insert success %s\", numbers, insert_count)\n\n\ndef get_contact_from_mongo(number):\n if not number:\n return []\n\n query = (TotalContact\n .objects(src_number=number,\n source__in=TotalContact.available())\n .order_by('source'))\n lst = []\n for c in query:\n relation = TotalContact.relationship(c.source)\n if relation == -1:\n continue\n source = TotalContact.str_source(c.source)\n if not source:\n continue\n lst.append({\n 'related_number': c.dest_number,\n 'source': source,\n 'is_calc': c.is_calc,\n 'total_count': c.total_count,\n 'total_duration': c.total_duration,\n 'relation': relation,\n 'name': c.dest_name\n })\n return lst\n\n\n@action(MessageAction.CONTACT_FROM_TOTAL)\ndef contact_from_total(payload, msg_id):\n number = payload.get('number')\n user_id = payload.get('user_id')\n if not (number and user_id):\n logging.error(\"Invalid params\")\n return\n result = get_contact_from_mongo(number)\n if not result:\n logging.error(\"contact from mongo is none\")\n return\n\n contacts = Contact.filter(Contact.user_id == user_id)\n existing_numbers = {contact.number for contact in contacts}\n contact_list = []\n\n for c in result:\n number = number_strip(c['related_number'])\n if number in existing_numbers:\n continue\n\n contact_list.append({\n 'user_id': user_id,\n 'name': c['name'],\n 'number': number,\n 'relationship': c['relation'],\n 'source': c['source'],\n 'total_duration': c['total_duration'],\n 'total_count': c['total_count'],\n 'real_relationship': c['relation']\n })\n existing_numbers.add(number)\n\n if contact_list:\n Contact.insert_many(contact_list).execute()\n\n\n@action(MessageAction.BILL_REVOKE)\ndef bill_revoke(payload, msg_id):\n application_id = payload['external_id']\n if 'bill_sub_id' not in payload:\n bill_revoke_old(application_id)\n return\n # 子账单id\n sub_bill_id = payload['bill_sub_id']\n # java中还款时的唯一标志\n partner_bill_id = payload['partner_bill_id']\n\n application = (Application\n .filter(Application.external_id == application_id).first())\n\n if application.type == ApplicationType.CASH_LOAN_STAGING.value:\n # 根据子账单获取催收单的id\n application = (Application.select(Application)\n .join(OverdueBill,JOIN_LEFT_OUTER,\n on = Application.id == OverdueBill.collection_id)\n .where(OverdueBill.external_id == application_id,\n OverdueBill.sub_bill_id == sub_bill_id)\n .first())\n if not application:\n logging.info('application %s paid, not found application',\n application_id)\n return\n\n try:\n bill = BillService().sub_bill_list(bill_sub_ids = [sub_bill_id])\n bill = bill[0]\n except Exception:\n logging.error('application %s overdue, get bill info failed: '\n 'Request To Repayment Error', application_id)\n raise RuntimeError('Get repayment bills failed. 
{}'\n .format(str(application.id)))\n\n if bill.get('overdue_days') > 0 and bill.get('status') != 2:\n\n Application.update(\n status=ApplicationStatus.UNCLAIMED.value\n ).where(Application.id == application.id).execute()\n # 获取子账单\n overdue_bill = (OverdueBill\n .filter(OverdueBill.external_id == application_id,\n OverdueBill.sub_bill_id == sub_bill_id)\n .first())\n if not overdue_bill:\n logging.info(\"not find overdue_bill,sub_bill_id:%s,appid:%s\" %\n (sub_bill_id, application_id))\n return\n if overdue_bill.status == ApplicationStatus.REPAID.value:\n overdue_bill.status = ApplicationStatus.UNCLAIMED.value\n overdue_bill.finished_at = None\n overdue_bill.save()\n # 还款记录要置为无效\n RepaymentLog.update(\n no_active = 1\n ).where(RepaymentLog.partner_bill_id == partner_bill_id,\n RepaymentLog.overdue_bill_id == overdue_bill.id).execute()\n\n# 老数据消息处理\ndef bill_revoke_old(external_id):\n application = (Application.select()\n .where(Application.id == external_id)\n .first())\n if not application:\n logging.info(\"not get application\")\n return\n try:\n bill = BillService().bill_dict(\n application_id=external_id)\n except Exception:\n logging.error('application %s overdue, get bill info failed: '\n 'Request To Repayment Error', external_id)\n return\n if bill.get('overdue_days') >0 and bill.get(\"status\") != 2:\n q = (Application\n .update(status=ApplicationStatus.UNCLAIMED.value,\n repay_at=bill.get('repay_at'))\n .where(Application.id == external_id).execute())\n p = (OverdueBill.update(status=ApplicationStatus.UNCLAIMED.value)\n .where(OverdueBill.collection_id == external_id).execute())\n return\n\n\ndef check_key_not_none(payload, keys):\n for key in keys:\n if payload.get(key) is None:\n logging.error('Missing args {}'.format(str(key)))\n return False\n return True\n\n\n# 还款\n@action(MessageAction.BILL_PAID)\ndef bill_paid(payload, msg_id):\n # Don't use validator, it will throw exception\n validate = check_key_not_none(payload,\n ['external_id', 'late_fee_part',\n 'principal_part', 'paid_at','bill_sub_id',\n 'partner_bill_id'])\n if not validate:\n logging.error('payload key not fully pass in.')\n return\n\n external_id = payload['external_id']\n\n late_fee_part = Decimal(payload['late_fee_part'])\n principal_part = Decimal(payload['principal_part'])\n paid_at = payload['paid_at']\n partner_bill_id = payload['partner_bill_id']\n\n logging.debug('application %s paid principal part %s, paid late fee '\n 'part %s', external_id, principal_part, late_fee_part)\n\n application = (Application\n .filter(Application.external_id == external_id)\n .order_by(-Application.created_at)\n .first())\n if not application:\n logging.info('application %s paid, not found application',external_id)\n return\n\n # 获取期数\n sub_bill_id = payload['bill_sub_id']\n overdue_bill = (OverdueBillR.select()\n .where(OverdueBillR.collection_id == application.id,\n OverdueBillR.sub_bill_id == sub_bill_id)\n .first())\n if (application.type == ApplicationType.CASH_LOAN_STAGING.value\n and not overdue_bill):\n logging.info(\"bill sub not in bomber %s\",sub_bill_id)\n return\n with db.atomic():\n repay_at = str_no_utc_datetime(payload['latest_repay_at'])\n\n Application.update(\n repay_at=repay_at\n ).where(Application.id == application.id).execute()\n\n # 预测呼出系统上线后 全部认为 is_bombed = True\n\n RepaymentLog.create(\n application=application.id,\n is_bombed=True,\n current_bomber=application.latest_bomber_id,\n cycle=application.cycle,\n principal_part=principal_part,\n late_fee_part=late_fee_part,\n repay_at=paid_at,\n 
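# overdue_bill can be None here (it is only guaranteed to exist for\n            # instalment records), hence the conditional fields just below\n            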
ptp_bomber=application.ptp_bomber,\n latest_call=application.latest_call,\n periods=overdue_bill.periods if overdue_bill else None,\n overdue_bill_id=overdue_bill.id if overdue_bill else None,\n partner_bill_id=partner_bill_id\n )\n\n # 智能催收 —— 催收号码进行排序\n phone_status = PhoneStatus.CONNECTED.value\n real_relationship = RealRelationship.user_values()\n commit = CallActionCommit.NO.value\n number = (CallActions.select(CallActions.number)\n .where(CallActions.phone_status == phone_status,\n CallActions.real_relationship << real_relationship,\n CallActions.commit == commit,\n CallActions.application == application.id)\n .order_by(-CallActions.created_at)\n .first())\n if number:\n (Contact.update(call_priority=PriorityStatus.REPAY.value)\n .where(Contact.user_id == application.user_id,\n Contact.call_priority == PriorityStatus.LAST.value)\n ).execute()\n\n (Contact.update(call_priority=PriorityStatus.LAST.value)\n .where(Contact.user_id == application.user_id,\n Contact.number == number.number)\n ).execute()\n\n if not application.latest_bomber_id:\n return\n\n Inbox.create(\n title='application %s,sub_bill_id %s repaid' % (\n application.external_id, sub_bill_id),\n content='application %s,sub_bill_id %s repaid' % (\n application.external_id, sub_bill_id),\n receiver=(application.latest_bomber_id or\n application.last_bomber_id),\n category=InboxCategory.REPAID.value,\n )\n\n\n@action(MessageAction.BILL_RELIEF)\ndef bill_relief(payload, msg_id):\n \"\"\"已废弃\"\"\"\n bill = payload['head_bill']\n\n repay_at = str_no_utc_datetime(bill['latest_repay_at'])\n updated_row = Application.update(\n repay_at=repay_at,\n ).where(Application.id == bill['external_id']).execute()\n\n logging.info('application %s bill relief done', bill['external_id'])\n return updated_row\n\n\n# 还款完成,\n@action(MessageAction.BILL_CLEARED)\n@action(MessageAction.BILL_CLEARED_BEFORE_CONFIRM)\ndef bill_cleared(payload, msg_id):\n \"\"\"\n BILL_CLEARED_BEFORE_CONFIRM仅在bomber系统中使用,MST清除账单时先修改其状态\n 为还款完成,让其不被催收\n \"\"\"\n external_id = payload.get('external_id')\n sub_bill_id = payload.get('bill_sub_id')\n if not external_id:\n logging.warning('payload has no external_id. 
{}'.format(str(payload)))\n return\n\n # 如果还清,清除不在拨打ivr\n AutoIVR.update(\n status=AutoIVRStatus.REPAID.value\n ).where(AutoIVR.application_id == external_id).execute()\n\n try:\n bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])\n bill = bill[0]\n except Exception:\n logging.error('get bill info failed: '\n 'Request To Repayment Error', external_id)\n return\n application = Application.filter(\n Application.external_id == external_id,\n Application.status << [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.BAD_DEBT.value,\n ApplicationStatus.AB_TEST.value]\n ).first()\n if not application:\n logging.info('application %s repay clear, not found bomber record',\n external_id)\n return\n\n with db.atomic():\n # 修改本次还清的自账单状态\n sub_bill_update = (OverdueBill.update(\n status = ApplicationStatus.REPAID.value,\n finished_at = datetime.now())\n .where(OverdueBill.collection_id == application.id,\n OverdueBill.sub_bill_id == sub_bill_id)\n .execute())\n # 如果是分期的件,判断是否完成还款\n overdue_bill = (OverdueBill.select()\n .where(OverdueBill.collection_id == application.id,\n OverdueBill.status != 2,\n OverdueBill.sub_bill_id != sub_bill_id))\n if overdue_bill.exists():\n if application.latest_bomber_id:\n Inbox.create(\n title='application %s sub_bill_id %s cleared' % (\n application.external_id, sub_bill_id),\n content='application %s sub_bill_id %s cleared' % (\n application.external_id, sub_bill_id),\n receiver=application.latest_bomber_id,\n category=InboxCategory.CLEARED.value,\n )\n return\n\n # 还款完成同步更新到外包\n partner = DispatchApp.filter(DispatchApp.application == application.id)\n if partner.exists():\n DispatchApp.update(\n status=DisAppStatus.ABNORMAL.value\n ).where(DispatchApp.application == application.id).execute()\n\n # 更新自动拨号系统队列 application 状态\n AutoCallList.update(\n status=AutoListStatus.REMOVED.value,\n description='bill clear'\n ).where(AutoCallList.application == application.id).execute()\n\n application.status = ApplicationStatus.REPAID.value\n application.finished_at = datetime.now()\n application.paid_at = datetime.now()\n # 如果逾期天数为0说明没有逾期,该件不应该进bomber\n if int(bill.get(\"overdue_days\")) <= 0:\n application.no_active = 1\n (RepaymentLog.update(no_active=1)\n .where(RepaymentLog.application == application.id)\n .execute())\n application.save()\n\n bomber_id = application.latest_bomber_id\n # c1b月底清件之后会入案,支付完成时要出案,2是默认的bomber_id\n if (application.cycle in (Cycle.C1A.value,Cycle.C1B.value) and\n not bomber_id):\n bomber_id = application.cycle\n if not bomber_id:\n return\n\n (DispatchAppHistory.update(\n out_at=datetime.now()\n ).where(\n DispatchAppHistory.application == application.id,\n DispatchAppHistory.bomber_id == bomber_id)).execute()\n\n if not application.latest_bomber_id:\n return\n\n item = (OldLoanApplication\n .get_or_none(OldLoanApplication.status ==\n OldLoanStatus.PROCESSING.value,\n OldLoanApplication.application_id ==\n application.id))\n if item:\n end_old_application(item, paid=True)\n out_record(src_bomber_id=bomber_id,\n application_ids=[item.application_id])\n\n Inbox.create(\n title='application %s cleared' % application.external_id,\n content='application %s cleared' % application.external_id,\n receiver=application.latest_bomber_id,\n category=InboxCategory.CLEARED.value,\n )\n\n\n# 同步bill2\n@action(MessageAction.OVERDUE_BILL_SYNC)\ndef overdue_bill_sync(payload, msg_id):\n \"\"\"已废弃\"\"\"\n bill2_list = payload\n updated_count = 0\n with db.atomic():\n for bill in bill2_list:\n\n principal = 
Decimal(bill['principal'])\n repay_at = str_no_utc_datetime(bill['latest_repay_at'])\n\n updated_count += Application.update(\n amount=principal,\n repay_at=repay_at,\n ).where(Application.id == bill['external_id']).execute()\n\n logging.info('overdue sync done, updated count: %s', updated_count)\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)\ndef calc_overdue_days_over(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n #更新逾期天数大于95天的件\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n query = (Application\n .update(overdue_days=overdue_days)\n .where(Application.status <<\n [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.AB_TEST.value],\n Application.overdue_days > 95,\n Application.type == ApplicationType.CASH_LOAN.value))\n updated_rows_count = query.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count)\n\n try:\n calc_overdue_days_over_instalment()\n except Exception as e:\n logging.error(\"calc_overdue_days_over_instalment_error: %s\"%str(e))\n\n # 计算overdue_days后自动触发升级\n apps = Application.filter(\n Application.status << [ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value],\n Application.overdue_days > 95,\n Application.promised_date.is_null(True) |\n (fn.DATE(Application.promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(\n MessageAction.BOMBER_AUTOMATIC_ESCALATION,\n {'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n\n# 计算逾期天数超过95天的件的逾期天数\ndef calc_overdue_days_over_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.AB_TEST.value]\n for status in sub_bill_status_list:\n # 更新逾期天数\n query = (OverdueBill.update(overdue_days=overdue_days)\n .where(OverdueBill.status == status,\n OverdueBill.overdue_days > 95))\n updated_rows_count = query.execute()\n logging.info(\"calc_overdue_days_over_instalment done,count:%s,status:%s\" %\n (updated_rows_count, status))\n\n # 获取所有的子账单信息\n overdue_bills = (OverdueBill\n .select(OverdueBill.collection_id,\n OverdueBill.overdue_days)\n .join(Application, JOIN_LEFT_OUTER,\n on=OverdueBill.collection_id == Application.id)\n .where(Application.status == status,\n (Application.type ==\n ApplicationType.CASH_LOAN_STAGING.value)))\n # 获取每个分期催收单要更新的逾期天数\n app_update = {}\n for ob in overdue_bills:\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = max(app_update[ob.collection_id], ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n # 更新催收单的逾期天数\n for aid, a_days in app_update.items():\n q = (Application.update(overdue_days=a_days)\n .where(Application.id == aid)\n .execute())\n logging.info(\"update instalment application done\")\n\n\n\n@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)\ndef calc_overdue_days(payload, msg_id):\n \"\"\"\n Call by BOMBER_CALC_SUMMARY\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, 
SQL('0'))\n query_unclaimed = (Application\n .update(overdue_days=overdue_days)\n .where(Application.status ==\n ApplicationStatus.UNCLAIMED.value,\n Application.overdue_days <= 95,\n (Application.type ==\n ApplicationType.CASH_LOAN.value)))\n updated_rows_count_unclaimed = query_unclaimed.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_unclaimed)\n\n query_processing = (Application\n .update(overdue_days=overdue_days)\n .where(Application.status ==\n ApplicationStatus.PROCESSING.value,\n Application.overdue_days <= 95,\n (Application.type ==\n ApplicationType.CASH_LOAN.value)))\n updated_rows_count_processing = query_processing.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_processing)\n\n query_test = (Application\n .update(overdue_days=overdue_days)\n .where(Application.status ==\n ApplicationStatus.AB_TEST.value,\n Application.overdue_days <= 95,\n (Application.type ==\n ApplicationType.CASH_LOAN.value)))\n updated_rows_count_test = query_test.execute()\n logging.info('calc overdue days done, updated count: %s',\n updated_rows_count_test)\n\n # 分期账单计算逾期天数\n calc_overdue_days_instalment()\n\n # 计算overdue_days后自动触发升级\n apps = Application.select(Application.id).where(\n Application.status << [ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value],\n Application.overdue_days <= 95,\n Application.promised_date.is_null(True) |\n (fn.DATE(Application.promised_date) < datetime.today().date()))\n ids = [i.id for i in apps]\n for idx in range(0, len(ids), 100):\n send_to_default_q(\n MessageAction.BOMBER_AUTOMATIC_ESCALATION,\n {'application_list': ids[idx:idx + 100]})\n send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})\n\n # overdue_days 计算完成后,修改C1A_entry(预期天数为4的设为C1A)\n Application.update(\n C1A_entry=datetime.now()\n ).where(\n Application.status << [ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value,\n ApplicationStatus.AB_TEST.value],\n Application.overdue_days == 4\n ).execute()\n\n# 分期的件计算逾期天数\ndef calc_overdue_days_instalment():\n now = fn.NOW()\n origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)\n overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))\n sub_bill_status_list = [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.AB_TEST.value]\n # 获取当月第一天的时间\n today_now_time = datetime.now()\n month_first_day = today_now_time.replace(day=1,\n hour=1,\n minute=30,\n second=0,\n microsecond=0)\n for status in sub_bill_status_list:\n # 更新逾期天数\n query = (OverdueBill.update(overdue_days = overdue_days)\n .where(OverdueBill.status == status,\n OverdueBill.overdue_days <= 95))\n updated_rows_count = query.execute()\n logging.info(\"calc_overdue_days_instalment done,count:%s,status:%s\" %\n (updated_rows_count, status))\n\n # 获取所有的子账单信息\n overdue_bills = (OverdueBill\n .select(OverdueBill.status,\n OverdueBill.created_at,\n OverdueBill.collection_id,\n OverdueBill.overdue_days)\n .join(Application, JOIN_LEFT_OUTER,\n on=OverdueBill.collection_id == Application.id)\n .where(Application.status == status,\n (Application.type ==\n ApplicationType.CASH_LOAN_STAGING.value)))\n # 获取每个分期催收单要更新的逾期天数\n app_update = {}\n for ob in overdue_bills:\n # 排除到分期这个月之前还款完成的那一期\n if (ob.status == ApplicationStatus.REPAID.value and\n ob.created_at < month_first_day):\n continue\n if ob.collection_id not in app_update:\n app_update[ob.collection_id] = ob.overdue_days\n else:\n ob_days = 
max(app_update[ob.collection_id],ob.overdue_days)\n app_update[ob.collection_id] = ob_days\n # 更新催收单的逾期天数\n for aid,a_days in app_update.items():\n q = (Application.update(overdue_days = a_days)\n .where(Application.id == aid)\n .execute())\n logging.info(\"update instalment application done\")\n\n\n@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)\ndef automatic_escalation(payload, msg_id):\n app_ids = payload.get('application_list', [])\n if not app_ids:\n return\n # 过滤掉已完成的订单\n apps = (Application.select()\n .where(Application.id.in_(app_ids),\n Application.status != ApplicationStatus.REPAID.value))\n\n for a in apps:\n new_cycle = application_entry_different_calculations(a)\n if a.overdue_days < 90:\n logging.info(\n \"automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}\".format(\n a.id, new_cycle, a.cycle, a.overdue_days))\n if new_cycle > a.cycle:\n with db.atomic():\n if (a.latest_bomber_id or\n a.cycle in (Cycle.C1A.value, Cycle.C1B.value)):\n bomber_id = (a.latest_bomber_id\n if a.latest_bomber_id else a.cycle)\n (DispatchAppHistory.update(\n out_at=datetime.now(),\n out_overdue_days=a.overdue_days,\n ).where(\n DispatchAppHistory.application == a.id,\n DispatchAppHistory.bomber_id == bomber_id\n )).execute()\n\n Escalation.create(\n application=a.id,\n type=EscalationType.AUTOMATIC.value,\n status=ApprovalStatus.APPROVED.value,\n current_cycle=a.cycle,\n escalate_to=new_cycle,\n current_bomber_id=a.latest_bomber,\n )\n\n # 升级的时候如果是外包的件更新dispatch_app中的状态\n dis_app_update = (DispatchApp\n .update(status = DisAppStatus.ABNORMAL.value)\n .where(DispatchApp.application == a.id))\n dis_app_update.execute()\n a.cycle = new_cycle\n a.last_bomber = a.latest_bomber\n a.status = ApplicationStatus.UNCLAIMED.value\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n # 升级之后 拨打次数清零\n a.called_times = 0\n if new_cycle == Cycle.C1B.value:\n a.C1B_entry = datetime.now()\n elif new_cycle == Cycle.C2.value:\n a.C2_entry = datetime.now()\n elif new_cycle == Cycle.C3.value:\n a.C3_entry = datetime.now()\n a.save()\n logging.info('automatic escalation done')\n\n# 把部分件的进入C1B的时间改为10天\ndef application_entry_different_calculations(app):\n conf = {\n 1: [1, 10],\n 2: [11, 30],\n 3: [31, 60],\n 4: [61, 90],\n 5: [91, 999999],\n }\n for new_cycle,scopes in conf.items():\n if scopes[0] <= app.overdue_days <= scopes[1]:\n return new_cycle\n return app.cycle\n\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY)\ndef cron_summary(payload, msg_id):\n \"\"\"已废弃\"\"\"\n employees = Bomber.select(Bomber, Role).join(Role)\n summary = {\n i.id: {\n 'cycle': i.role.cycle,\n 'claimed': 0,\n 'completed': 0,\n 'cleared': 0,\n 'escalated': 0,\n 'transferred': 0,\n 'promised': 0,\n 'amount_recovered': Decimal(0),\n 'calls_made': 0,\n 'calls_connected': 0,\n 'sms_sent': 0,\n }\n for i in employees\n }\n # 每天 2点 15分 计算 昨天的情况\n now_date = date.today()\n cal_date = now_date - timedelta(days=1)\n # 当日下了多少ptp\n claimed = (Application\n .select(Application.latest_bomber,\n fn.COUNT(Application.id).alias('claimed'))\n .where(fn.DATE(Application.claimed_at) == cal_date,\n Application.status <<\n [ApplicationStatus.PROCESSING.value,\n ApplicationStatus.REPAID.value],\n Application.latest_bomber.is_null(False))\n .group_by(Application.latest_bomber))\n\n # 当日ptp还款件数目\n cleared = (Application\n .select(Application.latest_bomber,\n fn.COUNT(Application.id).alias('cleared'))\n .where(fn.DATE(Application.finished_at) == cal_date,\n Application.status == ApplicationStatus.REPAID.value,\n 
Application.latest_bomber.is_null(False))\n .group_by(Application.latest_bomber))\n\n # 当日有多少个ptp被维护\n completed = (Application\n .select(Application.latest_bomber,\n fn.COUNT(Application.id).alias('completed'))\n .where(Application.latest_bombing_time.is_null(False),\n fn.DATE(Application.latest_bombing_time) == cal_date,\n Application.latest_bomber.is_null(False))\n .group_by(Application.latest_bomber))\n\n # 手工维护的件多少个件进入下一个cycle\n escalated = (Escalation\n .select(Escalation.current_bomber,\n fn.COUNT(Escalation.id).alias('escalated'))\n .where(fn.DATE(Escalation.created_at) == cal_date,\n Escalation.type == EscalationType.AUTOMATIC.value,\n Escalation.current_bomber.is_null(False),\n Escalation.status == ApprovalStatus.APPROVED.value)\n .group_by(Escalation.current_bomber))\n\n # 当日从某人手上移出多少个件\n transferred = (Transfer\n .select(Transfer.operator,\n fn.COUNT(Transfer.id).alias('transferred'))\n .where(fn.DATE(Transfer.reviewed_at) == cal_date,\n Transfer.status == ApprovalStatus.APPROVED.value)\n .group_by(Transfer.operator))\n\n # 当天的下p件有多少有进展\n promised = (\n BombingHistory\n .select(BombingHistory.bomber,\n fn.COUNT(BombingHistory.id).alias('promised'))\n .where(fn.DATE(BombingHistory.created_at) == cal_date,\n BombingHistory.result == BombingResult.HAS_PROGRESS.value)\n .group_by(BombingHistory.bomber)\n )\n\n # 当天催回的金额\n amount_recovered = (RepaymentLog\n .select(RepaymentLog.current_bomber,\n fn.SUM(RepaymentLog.principal_part)\n .alias('principal_part'),\n fn.SUM(RepaymentLog.late_fee_part)\n .alias('late_fee_part'))\n .where(fn.DATE(RepaymentLog.repay_at) == cal_date,\n RepaymentLog.is_bombed == True,\n RepaymentLog.current_bomber.is_null(False))\n .group_by(RepaymentLog.current_bomber))\n\n # calllog表已废弃\n calls_made = (CallLog\n .select(CallLog.user_id,\n fn.COUNT(CallLog.record_id).alias('calls_made'))\n .where(fn.DATE(CallLog.time_start) == cal_date,\n CallLog.system_type == '1')\n .group_by(CallLog.user_id))\n\n # calllog表已废弃\n calls_connected = (CallLog\n .select(CallLog.user_id,\n fn.COUNT(CallLog.record_id)\n .alias('calls_connected'))\n .where(fn.DATE(CallLog.time_start) == cal_date,\n CallLog.duration > 10,\n CallLog.system_type == '1').\n group_by(CallLog.user_id))\n\n # 当天发送的所有短信\n sms_sent = (ConnectHistory\n .select(ConnectHistory.operator,\n fn.COUNT(ConnectHistory.id).alias('sms_sent'))\n .where(ConnectHistory.type.in_(ConnectType.sms()),\n ConnectHistory.created_at >= cal_date,\n ConnectHistory.created_at < now_date\n )\n .group_by(ConnectHistory.operator))\n\n for i in claimed:\n summary[i.latest_bomber_id]['claimed'] += i.claimed\n\n for i in completed:\n summary[i.latest_bomber_id]['completed'] += i.completed\n\n for i in cleared:\n summary[i.latest_bomber_id]['cleared'] += i.cleared\n\n for i in escalated:\n summary[i.current_bomber_id]['escalated'] += i.escalated\n\n for i in transferred:\n summary[i.operator_id]['transferred'] += i.transferred\n\n for i in promised:\n summary[i.bomber_id]['promised'] += i.promised\n\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n\n for i in calls_made:\n summary[int(i.user_id)]['calls_made'] += i.calls_made\n\n for i in calls_connected:\n summary[int(i.user_id)]['calls_connected'] += i.calls_connected\n\n for i in sms_sent:\n summary[i.operator_id]['sms_sent'] += i.sms_sent\n\n insert_args = []\n for bomber_id, data in summary.items():\n insert_args.append({\n 'bomber': bomber_id,\n 'cycle': data['cycle'],\n 
'claimed': data['claimed'],\n 'completed': data['completed'],\n 'cleared': data['cleared'],\n 'escalated': data['escalated'],\n 'transferred': data['transferred'],\n 'promised': data['promised'],\n 'amount_recovered': data['amount_recovered'],\n 'calls_made': data['calls_made'],\n 'calls_connected': data['calls_connected'],\n 'sms_sent': data['sms_sent'],\n 'date': cal_date,\n })\n\n if insert_args:\n Summary.insert_many(insert_args).execute()\n\n cycle_args = []\n # cal new in\n # 按照 cycle 统计\n escalated_in = (Escalation\n .select(Escalation.escalate_to,\n fn.COUNT(Escalation.id).alias('escalated_in'))\n .where(Escalation.status == ApprovalStatus.APPROVED.value,\n fn.DATE(Escalation.created_at) == cal_date)\n .group_by(Escalation.escalate_to))\n\n for i in escalated_in:\n cycle_args.append({\n 'cycle': i.escalate_to,\n 'escalated_in': i.escalated_in,\n 'date': cal_date,\n })\n\n amount_recovered_total = (\n RepaymentLog\n .select(RepaymentLog.cycle,\n fn.SUM(RepaymentLog.principal_part).alias('principal_part'),\n fn.SUM(RepaymentLog.late_fee_part).alias('late_fee_part'))\n .where(fn.DATE(RepaymentLog.repay_at) == cal_date)\n .group_by(RepaymentLog.cycle)\n )\n\n for i in amount_recovered_total:\n amount_recovered_total = i.principal_part + i.late_fee_part\n cycle_args.append({\n 'cycle': i.cycle,\n 'amount_recovered_total': amount_recovered_total,\n 'date': cal_date,\n })\n if cycle_args:\n Summary.insert_many(cycle_args).execute()\n\n logging.info('cal summary done')\n\n # 报表计算结束后 再更新逾期天数 触发自动升级\n send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})\n\n\n@action(MessageAction.BOMBER_CALC_SUMMARY2)\ndef cron_summary2(payload, msg_id):\n \"\"\"已废弃,定时任务还在执行,具体情况待确定\"\"\"\n cal_date = date.today() - timedelta(days=1)\n employees = Bomber.select(Bomber, Role).join(Role)\n auto_call_actions = (\n AutoCallActions\n .select(\n AutoCallActions.bomber,\n AutoCallActions.result,\n fn.COUNT(AutoCallActions.id).alias('count')\n )\n .where(fn.DATE(AutoCallActions.created_at) == cal_date)\n )\n\n amount_recovered = (RepaymentLog\n .select(RepaymentLog.current_bomber,\n fn.SUM(RepaymentLog.principal_part)\n .alias('principal_part'),\n fn.SUM(RepaymentLog.late_fee_part)\n .alias('late_fee_part'))\n .where(fn.DATE(RepaymentLog.repay_at) == cal_date,\n RepaymentLog.current_bomber.is_null(False),\n RepaymentLog.is_bombed == True))\n\n cleared = (Application\n .select(Application.latest_bomber,\n fn.COUNT(Application.id).alias('cleared'))\n .where(fn.DATE(Application.finished_at) == cal_date,\n Application.status == ApplicationStatus.REPAID.value,\n Application.latest_bomber.is_null(False)))\n\n auto_call_actions = auto_call_actions.group_by(\n AutoCallActions.bomber, AutoCallActions.result\n )\n amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)\n cleared = cleared.group_by(Application.latest_bomber)\n\n summary = {\n e.id: {\n 'cycle': e.role.cycle,\n 'answered_calls': 0,\n 'ptp': 0,\n 'follow_up': 0,\n 'not_useful': 0,\n 'cleared': 0,\n 'amount_recovered': 0,\n }\n for e in employees\n }\n for a in auto_call_actions:\n summary[a.bomber_id]['answered_calls'] += a.count\n if a.result == AutoCallResult.PTP.value:\n summary[a.bomber_id]['ptp'] += a.count\n if a.result == AutoCallResult.FOLLOW_UP.value:\n summary[a.bomber_id]['follow_up'] += a.count\n if a.result == AutoCallResult.NOT_USEFUL.value:\n summary[a.bomber_id]['not_useful'] += a.count\n\n for i in amount_recovered:\n amount_recovered = i.principal_part + i.late_fee_part\n 
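# the assignment above rebinds the name of the query being iterated;\n        # iteration is unaffected (the iterator was already created), but a\n        # distinct local name would be clearer\n        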
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered\n\n    for i in cleared:\n        summary[i.latest_bomber_id]['cleared'] += i.cleared\n\n    insert_args = []\n    for bomber_id, data in summary.items():\n        insert_args.append({\n            'bomber': bomber_id,\n            'cycle': data['cycle'],\n            'answered_calls': data['answered_calls'],\n            'ptp': data['ptp'],\n            'follow_up': data['follow_up'],\n            'not_useful': data['not_useful'],\n            'cleared': data['cleared'],\n            'amount_recovered': str(data['amount_recovered']),\n            'date': cal_date,\n        })\n\n    if insert_args:\n        Summary2.insert_many(insert_args).execute()\n\n\n@action(MessageAction.BOMBER_SYNC_CONTACTS)\ndef sync_suggested_contacts(payload, msg_id):\n    \"\"\" suggested contacts sync \"\"\"\n\n    applications = (Application\n                    .select(Application.id, Application.user_id)\n                    .where(Application.status <<\n                           [ApplicationStatus.UNCLAIMED.value,\n                            ApplicationStatus.PROCESSING.value]))\n\n    logging.debug('start sync contact')\n    for a in applications:\n        sync_contacts(a)\n    logging.info('contact sync finished')\n\n\ndef sync_contacts(application):\n    logging.info('application %s start sync contact', application.id)\n\n    # add contact info\n    contacts = Contact.filter(Contact.user_id == application.user_id)\n    existing_numbers = {contact.number for contact in contacts}\n\n    # sms contacts\n    insert_contacts = []\n    sms_contacts = GoldenEye().get(\n        '/applications/%s/sms-contacts' % application.external_id\n    )\n    if not sms_contacts.ok:\n        sms_contacts = []\n        logging.info('get user %s sms contacts failed', application.external_id)\n    else:\n        sms_contacts = sms_contacts.json()['data']\n\n    for i in sms_contacts:\n        if i['number'] in existing_numbers:\n            continue\n        insert_contacts.append({\n            'user_id': application.user_id,\n            'name': i['name'],\n            'number': i['number'],\n            'relationship': Relationship.SUGGESTED.value,\n            'source': 'sms contacts',\n            'real_relationship': Relationship.SUGGESTED.value\n        })\n        existing_numbers.add(i['number'])\n\n    if insert_contacts:\n        Contact.insert_many(insert_contacts).execute()\n\n    # call frequency\n    insert_contacts = []\n    cf = GoldenEye().get(\n        '/applications/%s/call/frequency' % application.external_id\n    )\n    if not cf.ok:\n        call_frequency = []\n        logging.error('get application %s call frequency error',\n                      application.external_id)\n    else:\n        call_frequency = cf.json()['data']\n\n    with db.atomic():\n        for i in call_frequency:\n            if i['number'] in existing_numbers:\n                (Contact\n                 .update(total_count=i['total_count'],\n                         total_duration=i['total_duration'])\n                 .where(Contact.number == i['number'],\n                        Contact.user_id == application.user_id)\n                 .execute())\n                continue\n\n            insert_contacts.append({\n                'user_id': application.user_id,\n                'name': i['name'],\n                'number': i['number'],\n                'relationship': Relationship.SUGGESTED.value,\n                'total_count': i['total_count'],\n                'total_duration': i['total_duration'],\n                'source': 'call frequency',\n                'real_relationship': Relationship.SUGGESTED.value\n            })\n    if insert_contacts:\n        Contact.insert_many(insert_contacts).execute()\n\n\n@action(MessageAction.BOMBER_AUTO_SMS)\n@deprecated(version='1.0', reason='This function will be removed soon')\ndef bomber_auto_sms(payload, msg_id):\n    day_diff = int(payload['day_diff'])\n    custom_type = payload.get('custom_type')\n    msg_type = payload['msg_type']\n    logging.info('auto sms %s sending', msg_type)\n\n    applications = (\n        Application\n        .select()\n        .where(Application.overdue_days == day_diff,\n               Application.status << [ApplicationStatus.PROCESSING.value,\n                                      ApplicationStatus.UNCLAIMED.value],\n               Application.promised_date.is_null(True) |\n               
(fn.DATE(Application.promised_date) < datetime.today().date()))\n    )\n\n    if custom_type == 'new':\n        applications = applications.where(Application.loan_success_times < 3)\n    if custom_type == 'old':\n        applications = applications.where(Application.loan_success_times >= 3)\n\n    templates = (\n        Template.select(Template.text, Template.app)\n        .where(Template.type == ConnectType.AUTO_SMS.value,\n               Template.id << Template.get_auto_sms_tpl(msg_type))\n    )\n    tpl_text = dict()\n    for tpl in templates:\n        tpl_text[tpl.app] = tpl.text\n\n    data_list = []\n    for a in applications:\n        tpl_data = {\n            'user_name': a.user_name,\n            'due_days': a.overdue_days,\n            'app_name': a.app,\n            'phone': a.user_mobile_no,\n            'cs_number': cs_number_conf.get(a.app, '02150202889'),\n        }\n        content = tpl_text[a.app].format(**tpl_data)\n        data_list.append({\n            'phone': '62' + a.user_mobile_no,\n            'content': content,\n            'app': a.app,\n        })\n\n    if not data_list:\n        logging.info('auto sms %s do not need sending', msg_type)\n        return\n\n    send_sms(data_list, msg_type, SmsChannel.NUSA.value)\n\n\n@action(MessageAction.BOMBER_AUTO_MESSAGE_DAILY)\ndef bomber_auto_message_daily(payload, msg_id):\n    app_dict = dict(zip(AppName.keys(), AppName.values()))\n\n    # phone records that were successfully auto-called today\n    auto_call_list = AutoCallActionsR \\\n        .select(AutoCallActionsR.application_id) \\\n        .where(fn.DATE(AutoCallActionsR.created_at) == fn.CURDATE())\n    applications = (\n        ApplicationR\n        .select()\n        .where(ApplicationR.overdue_days < 30,\n               ApplicationR.overdue_days > 4,\n               ApplicationR.type == ApplicationType.CASH_LOAN.value,\n               ApplicationR.status << [ApplicationStatus.PROCESSING.value,\n                                       ApplicationStatus.UNCLAIMED.value,\n                                       ApplicationStatus.AB_TEST.value],\n               ApplicationR.promised_date.is_null(True) |\n               (fn.DATE(ApplicationR.promised_date) < datetime.today().date()),\n               ~(ApplicationR.id << auto_call_list))\n    )\n    stage_list1 = range(*AutoCallMessageCycle.NEW_STAGE1.value['scope'], 3)  # 5, 8, 11, 14\n    stage_list2 = range(*AutoCallMessageCycle.STAGE2.value['scope'], 3)  # 15, 18\n    stage_list3 = range(*AutoCallMessageCycle.STAGE3.value['scope'], 3)\n    sms_list = defaultdict(list)\n    fcm_list = defaultdict(list)\n    for a in applications:\n        overdue_type = ''\n        if a.overdue_days in stage_list1:\n            if a.loan_success_times < 3:\n                overdue_type = AutoCallMessageCycle.NEW_STAGE1.value['type']\n            else:\n                overdue_type = AutoCallMessageCycle.OLD_STAGE1.value['type']\n        if a.overdue_days in stage_list2:\n            overdue_type = AutoCallMessageCycle.STAGE2.value['type']\n        if a.overdue_days in stage_list3:\n            overdue_type = AutoCallMessageCycle.STAGE3.value['type']\n        if overdue_type == '':\n            continue\n        # format app name\n        app_name = app_dict.get(a.app.upper(), AppName.default().value)\n        try:\n            tpl_id = Template.get_daily_auto_sms_tpl(overdue_type, app_name)\n        except KeyError:\n            logging.warning('Key error {}, id is {}'.format(\n                (overdue_type, app_name), a.id))\n            continue\n        data_map = {\n            'user_name': a.user_name,\n            'app_name': app_name,\n            'overdue_days': a.overdue_days,\n            'cs_number': cs_number_conf.get(a.app, '')\n        }\n        sms_list[(overdue_type, tpl_id, a.app)].append({\n            'receiver': '62' + a.user_mobile_no,\n            'data_map': data_map\n        })\n        fcm_list[(overdue_type, tpl_id, a.app)].append({\n            'receiver': a.user_id,\n            'data_map': data_map\n        })\n\n    for (msg_type, tpl_id, app_name), data_list in sms_list.items():\n        auto_send_sms_and_fcm(data_list, tpl_id, app_name, \"SMS\")\n    for (msg_type, tpl_id, app_name), data_list in fcm_list.items():\n        auto_send_sms_and_fcm(data_list, tpl_id, app_name, 
\"FCM\")\n\n\n#分期逾期短信\n@action(MessageAction.BOMBER_INSTALMENT_AUTO_MESSAGE_DAILY)\ndef bomber_instalment_auto_message_daily(payload, msg_id):\n applications = (ApplicationR.select(ApplicationR.id,\n ApplicationR.app,\n ApplicationR.user_id,\n ApplicationR.user_name,\n ApplicationR.user_mobile_no,\n ApplicationR.loan_success_times,\n OverdueBillR.status,\n OverdueBillR.sub_bill_id,\n OverdueBillR.overdue_days, )\n .join(OverdueBillR, JOIN_LEFT_OUTER,\n on=ApplicationR.id == OverdueBillR.collection_id)\n .where(ApplicationR.type ==\n ApplicationType.CASH_LOAN_STAGING.value,\n ApplicationR.status != ApplicationStatus.REPAID.value,\n ApplicationR.overdue_days < 90,\n ApplicationR.promised_date.is_null(True) |\n (fn.DATE(\n ApplicationR.promised_date) < datetime.today().date()),\n )\n .dicts())\n # 计算真实的逾期天数和欠款情况\n app_overdues = {}\n for app in applications:\n if app[\"status\"] == ApplicationStatus.REPAID.value:\n continue\n if app[\"id\"] in app_overdues:\n overdue_days = app_overdues[app[\"id\"]][\"overdue_days\"]\n app_overdues[app[\"id\"]][\"overdue_days\"] = max(app[\"overdue_days\"],\n overdue_days)\n app_overdues[app[\"id\"]][\"bill_sub_ids\"].append(app[\"sub_bill_id\"])\n else:\n app_overdues[app[\"id\"]] = {\n \"app_name\": app[\"app\"],\n \"user_id\": app[\"user_id\"],\n \"user_name\": app[\"user_name\"],\n \"overdue_days\": app[\"overdue_days\"],\n \"bill_sub_ids\": [app[\"sub_bill_id\"]],\n \"phone\": '62' + app[\"user_mobile_no\"],\n \"loan_success_times\": app[\"loan_success_times\"],\n \"cs_number\": cs_number_conf.get(app[\"app\"], '02150202889')\n }\n # 获取需要发短信的催收单和计算对应的未支付金额\n sms_dict = {}\n sub_bill_ids = []\n send_message = defaultdict(list)\n send_fcm = defaultdict(list)\n for aid, app in app_overdues.items():\n message_id = Template.get_daily_instalment_auto_sms_tpl(\n overdue_days=app[\"overdue_days\"],\n loan_times=app[\"loan_success_times\"]\n )\n if message_id:\n app[\"tpl_id\"] = message_id\n sms_dict[aid] = app\n sub_bill_ids.extend(app[\"bill_sub_ids\"])\n if not sms_dict:\n logging.info(\"no application need send sms\")\n return\n sub_bills = []\n try:\n for index in range(0,len(sub_bill_ids),30):\n sub_bill = BillService().sub_bill_list(\n bill_sub_ids=sub_bill_ids[index:index+30])\n sub_bills += sub_bill\n except Exception as e:\n logging.info(\"send sms get bill error:%s\" % str(e))\n return\n sub_bills_dict = {int(sb[\"id\"]): sb for sb in sub_bills}\n for aid, app in sms_dict.items():\n amount = 0\n for sbid in app[\"bill_sub_ids\"]:\n amount += sub_bills_dict.get(sbid, {}).get(\"unpaid\", 0)\n data_map = {\n \"user_name\": app[\"user_name\"],\n \"app_name\": app[\"app_name\"],\n \"overdue_days\": app[\"overdue_days\"],\n \"cs_number\": app[\"cs_number\"],\n \"amount\": str(amount)\n }\n send_message[(app['tpl_id'], app[\"app_name\"])].append({\n \"receiver\": app[\"phone\"],\n \"data_map\": data_map\n })\n send_fcm[(app['tpl_id'], app[\"app_name\"])].append({\n \"receiver\": app[\"user_id\"],\n \"data_map\": data_map\n })\n for (tpl_id, app_name), data_list in send_message.items():\n auto_send_sms_and_fcm(data_list, tpl_id, app_name, \"SMS\")\n for (msg_type, tpl_id, app_name), data_list in send_fcm.items():\n auto_send_sms_and_fcm(data_list, tpl_id, app_name, \"FCM\")\n\n\n\ndef auto_send_sms_and_fcm(data_list, tpl_id, app_name, message_type):\n if not data_list:\n return\n # 200 条 一次请求\n for idx in range(0, len(data_list), 200):\n request_json = {\n \"app_name\": app_name,\n \"failed_retry\": True,\n \"is_masking\": True,\n \"list\": data_list[idx: 
            "message_level": 1,
            "message_type": message_type,
            "sms_type": 4 if message_type == "SMS" else 0,
            "type_id": tpl_id
        }
        try:
            result = MessageService().send_batch_template(**request_json)
            if not result.get("result"):
                # logging.error() requires a message argument; the bare
                # calls here used to raise TypeError on the failure path.
                logging.error("auto_send_%s_failed:tpl_id:%s,app:%s,res:%s",
                              message_type, tpl_id, app_name, result)
        except Exception as e:
            logging.error("auto_send_%s_error:tpl_id:%s,app:%s,error:%s",
                          message_type, tpl_id, app_name, str(e))
            return
    logging.info("auto_send_%s_success:tpl_id:%s,app:%s",
                 message_type, tpl_id, app_name)


def get_danamall_msg_service(app_name, message_service):
    if app_name == AppName.DANAMALL.value:
        # token = app.config['service.message.%s.token' % app_name.lower()]
        message_service = Message(version=app_name)
    return message_service


# Collector SMS reminding the borrower of the promised repayment date
@action(MessageAction.BOMBER_REMIND_PROMISE)
def bomber_remind_promise(payload, msg_id):
    day_diff = int(payload['day_diff'])
    msg_type = payload['msg_type']
    logging.info('auto sms %s sending', msg_type)

    applications = (
        Application
        .select()
        .where(
            fn.DATEDIFF(fn.NOW(), Application.promised_date) == day_diff,
            Application.status << [
                ApplicationStatus.UNCLAIMED.value,
                ApplicationStatus.PROCESSING.value,
            ]
        )
    )

    templates = (
        Template
        .select(Template.text, Template.app)
        .where(Template.type == ConnectType.AUTO_SMS.value,
               Template.id << Template.get_auto_sms_tpl(msg_type))
    )

    tpl_text = {tpl.app: tpl.text for tpl in templates}
    message_date_dict = defaultdict(list)
    for a in applications:
        tpl_data = {
            'user_name': a.user_name,
            'due_days': a.overdue_days,
            'app_name': a.app,
            'phone': a.user_mobile_no,
            'cs_number': cs_number_conf.get(a.app, '02150202889'),
            'promised_date': a.promised_date.strftime('%d-%m-%Y'),
        }
        content = tpl_text[a.app].format(**tpl_data)
        message_date_dict[a.app].append(
            {
                "content": content,
                "receiver": '62' + a.user_mobile_no,
                "title": ""
            }
        )

    for app_name, data_list in message_date_dict.items():
        send_sms(data_list, msg_type, app_name)


@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
    app_id = payload['id']
    msg_type = payload['msg_type']
    discount_to = payload['discount_to']
    effective_to = payload['effective_to']

    application = Application.filter(Application.id == app_id).first()
    if not application:
        logging.error('discount approved msg send failed '
                      'application %s not found', app_id)
        return
    template = (
        Template
        .select(Template.text, Template.app)
        .where(Template.type == ConnectType.AUTO_SMS.value,
               Template.id << Template.get_auto_sms_tpl(msg_type),
               Template.app == application.app)
        .first()
    )
    if not template:
        logging.error('discount approved msg send failed '
                      'template %s not found', msg_type)
        return

    promised_date = None
    if application.promised_date:
        promised_date = application.promised_date.strftime('%d-%m-%Y')
    tpl_data = {
        'user_name': application.user_name,
        'due_days': application.overdue_days,
        'app_name': application.app,
        'phone': application.user_mobile_no,
        'cs_number': cs_number_conf.get(application.app, '02150202889'),
        'promised_date': promised_date,
        'discount_to': discount_to,
        'effective_to': effective_to,
    }
    content = template.text.format(**tpl_data)

    data_list = [{
        'receiver': '62' + application.user_mobile_no,
        'content': content,
        'title': "",
    }]
    send_sms(data_list, msg_type, application.app)


# Batch-send custom SMS
def send_sms(data_list, msg_type, app_name):
    if not data_list:
        return
    for index in range(0, len(data_list), 200):
        req_data = {
            "app_name": app_name,
            "failed_retry": True,
            "is_masking": True,
            "list": data_list[index: index + 200],
            "message_level": 0,
            "message_type": "SMS",
            "sms_type": 3
        }
        result = None
        try:
            result = MessageService().send_batch(**req_data)
            if not result.get("result"):
                logging.error(
                    "send_sms_failed:%s,req:%s,res:%s",
                    msg_type, req_data, result)
        except Exception as e:
            # result stays None if send_batch itself raised, so the log
            # below no longer references an unbound name.
            logging.error(
                "send_sms_error:%s,req:%s,res:%s,error:%s" % (
                    msg_type, req_data, result, str(e)))
            return
    logging.info("send_sms_success:%s", msg_type)


# Build the auto-call list and dispatch applications
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):

    with db.atomic():
        # Single-period applications: after each cycle's share is handed to
        # the outsourcing partners, the remainder goes to the in-house
        # bombers with the configured ids. Partners mark the different
        # outsourcing stages, and a bomber's partner_id identifies it as an
        # outsourced account.
        bomber_dispatch_app()

        # Instalment applications: dispatch relies on the `instalment` field
        # to identify the bombers responsible for each cycle.
        dispatch_instalment_app()
        # Existing dispatch records
        dis_apps = (DispatchApp
                    .select(DispatchApp.application)
                    .where(DispatchApp.status == DisAppStatus.NORMAL.value))

    c1_apps = (
        Application
        .select(Application.id,
                Application.cycle,
                Application.follow_up_date,
                Application.called_times)
        .where(
            Application.status.not_in([ApplicationStatus.REPAID.value,
                                       ApplicationStatus.AB_TEST.value]),
            Application.cycle == Cycle.C1A.value,
            Application.is_rejected == False,  # noqa
            Application.promised_date.is_null(True) |
            (fn.DATE(Application.promised_date) < datetime.today().date())
        ).order_by(Application.overdue_days, Application.apply_at)
    )
    dis_apps_ids = [da.application_id for da in dis_apps]

    insert_args = []

    for a in c1_apps:
        if a.id in dis_apps_ids:
            continue
        insert_args.append({
            'application': a.id,
            'cycle': a.cycle,
            'follow_up_date': a.follow_up_date,
            'called_times': 1 if a.called_times else 0,
            'description': 'init'
        })

    if not insert_args:
        logging.error('no application need auto call')

    # Rebuild auto_call_list from the application table
    with db.atomic():
        AutoCallList.delete().execute()
        for idx in range(0, len(insert_args), 100):
            AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()

    for idx in range(0, len(insert_args), 100):
        application_list = [
            i['application']
            for i in insert_args[idx:idx + 100]
        ]
        # Fetch the phone numbers that passed validation
        send_to_default_q(
            MessageAction.BOMBER_AUTO_CALL_CONTACT,
            {'application_list': application_list}
        )

    logging.info('bomber generate auto call list finished')

    # Redistribute applications that have gone a set number of days without
    # a PTP, i.e. the long-standing backlog
    send_to_default_q(
        MessageAction.UPDATE_BOMBER_FOR_SPECIAL,
        {})


class ChangeBomberTool(object):
    @staticmethod
    def in_record(bomber_id, ids, bd):
        subquery = (Application
                    .select(Application.amount,
                            fn.NOW().alias('created_at'),
                            fn.NOW().alias('updated_at'),
                            Application.id.alias('application_id'),
                            R(str(bomber_id)).alias('bomber_id'),
                            fn.NOW().alias('entry_at'),
                            R('null').alias('partner_id'),
                            SQL('DATE_ADD(CURDATE(),INTERVAL 14 DAY)')
                            .alias('expected_out_time'),
                            Application.overdue_days.alias(
                                'entry_overdue_days'))
                    .where(Application.status !=
                           ApplicationStatus.REPAID.value,
                           Application.id << ids))

        (Application
         .update(latest_bomber=bomber_id)
         .where(Application.id.in_(ids))
         .execute())
        application_list = list(subquery)
        for idx in range(0, len(application_list), 1000):
            applications = application_list[idx:idx + 1000]
            insert_args = list(map(partial(lambda_result, dct=bd),
                                   applications))
            DispatchAppHistory.insert_many(insert_args).execute()

    @staticmethod
    def out_record(a, bd):
        _id = str(a.id)
        (DispatchAppHistory.update(
            out_at=datetime.now(),
            out_overdue_days=a.overdue_days,
            out_principal_pending=(
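                # Outstanding principal at hand-off time: the contract
                # amount minus the principal repaid so far; bd maps each
                # application id to its bill summary from BillService.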
a.amount -\n Decimal(bd[_id].get('principal_paid'))),\n out_late_fee_pending=(\n bd[_id].get('late_fee') -\n bd[_id].get('late_fee_paid')),\n )\n .where(\n DispatchAppHistory.application == a.id,\n DispatchAppHistory.bomber_id == a.latest_bomber_id\n )).execute()\n\n a.last_bomber = a.latest_bomber\n a.latest_bomber = None\n a.ptp_bomber = None\n a.latest_call = None\n a.called_times = 0\n a.save()\n\n @staticmethod\n def classify(l, b):\n if len(l) == 1:\n return l[0]\n _l = filter(lambda x: x['bomber'] != b, l)\n return min(_l, key=lambda x: len(x['ids']))\n\n\n@action(MessageAction.UPDATE_BOMBER_FOR_SPECIAL)\ndef update_bomber_for_special(payload, msg_id):\n \"\"\"\n cycle 1b 每天将DPD21且没有处于下P状态的件,分配给另一个催收员\n cycle 2 每天将DPD46且没有处于下P状态的件,分配给另一个催收员\n cycle 3 每天将dpd76且没有处于下p状态的件,分配给另一个催收员\n\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n filter_list = {Cycle.C1B.value: {\"overdue_days\": 21, \"role_id\": 5},\n Cycle.C2.value: {\"overdue_days\": 46, \"role_id\": 6},\n Cycle.C3.value: {\"overdue_days\": 76, \"role_id\": 8}}\n cbt = ChangeBomberTool()\n for cycle, values in filter_list.items():\n overdue_days = values[\"overdue_days\"]\n bombers = (Bomber.select()\n .where(Bomber.role == values[\"role_id\"],\n Bomber.instalment == 0,\n Bomber.is_del == 0))\n bids = {b.id:b for b in bombers}\n apps = (Application.select()\n .where(Application.cycle == cycle,\n Application.type == ApplicationType.CASH_LOAN.value,\n Application.overdue_days == overdue_days,\n Application.status == ApplicationStatus.AB_TEST.value,\n Application.promised_date.is_null(True) |\n (fn.DATE(Application.promised_date) < date.today()),\n Application.latest_bomber_id.in_(list(bids.keys()))))\n classify_dict = defaultdict(list)\n for b in bombers:\n classify_dict[b.group_id].append({\"bomber\": b.id, \"ids\": []})\n with db.atomic():\n app_ids = [i.id for i in apps]\n if app_ids and bids:\n bills = BillService().bill_list(application_ids=app_ids)\n bill_dict = {str(bill['application_id']): bill for bill in\n bills}\n for i in apps:\n current_bomber = bids.get(i.latest_bomber_id)\n if not current_bomber:\n continue\n classify_list = classify_dict.get(current_bomber.group_id)\n d = cbt.classify(classify_list, i.latest_bomber_id)\n d[\"ids\"].append(i.id)\n cbt.out_record(i, bill_dict)\n for group_id, cl_list in classify_dict.items():\n for item in cl_list:\n cbt.in_record(item[\"bomber\"], item[\"ids\"], bill_dict)\n else:\n logging.info(\n \"cycle:{} empty application list {} or bomber list {}\".format(\n cycle, app_ids, list(bids.keys())))\n try:\n update_bomber_for_special_instalment()\n except Exception as e:\n logging.error(\"special_instalment_error:%s\"%str(e))\n\n# 分期c2,c3特殊分件\ndef update_bomber_for_special_instalment():\n filter_list = {Cycle.C1B.value: 21, Cycle.C2.value: 46, Cycle.C3.value: 76}\n for cycle,overdue_days in filter_list.items():\n # 获取分期指定的催收员\n bombers = (Bomber.select().where(Bomber.instalment == cycle,\n Bomber.is_del == 0))\n bids = {b.id:b for b in bombers}\n # 获取催收单\n apps = (Application.select()\n .where(Application.cycle == cycle,\n Application.status == ApplicationStatus.AB_TEST.value,\n Application.type ==\n ApplicationType.CASH_LOAN_STAGING.value,\n Application.overdue_days == overdue_days,\n Application.promised_date.is_null(True) |\n (fn.DATE(Application.promised_date) < date.today()),\n Application.latest_bomber_id.in_(list(bids.keys()))))\n\n classify_dict = defaultdict(list)\n for b in bombers:\n classify_dict[b.group_id].append({\"bomber\":b.id, \"ids\":[]})\n for a in apps:\n 
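            # Move each application to another bomber in the same group;
            # ChangeBomberTool.classify picks the candidate holding the
            # fewest ids so far, skipping the current holder. A minimal
            # sketch of that choice over plain dicts:
            #   min((x for x in candidates if x['bomber'] != current),
            #       key=lambda x: len(x['ids']))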
current_bomber = bids.get(a.latest_bomber_id)\n if not current_bomber:\n continue\n classify_list = classify_dict.get(current_bomber.group_id)\n d = ChangeBomberTool.classify(classify_list, a.latest_bomber_id)\n d[\"ids\"].append(a.id)\n with db.atomic():\n for group_id,classify_list in classify_dict.items():\n for cl in classify_list:\n aids = cl[\"ids\"]\n if not aids:\n continue\n latest_bomber_id = cl[\"bomber\"]\n q = (Application.update(latest_bomber = latest_bomber_id,\n last_bomber = Application.latest_bomber)\n .where(Application.id << aids)\n .execute())\n record_param = {\n \"cycle\": cycle,\n \"application_ids\": aids,\n \"dest_bomber_id\": latest_bomber_id,\n }\n out_and_in_record_instalment(**record_param)\n\n\n\ndef bomber_dispatch_app():\n\n # 将单期件c1a分件给外包,外包需设置,partner\n try:\n c1a_dispatch_app()\n except Exception as e:\n logging.error(\"c1a_dispatch_app error:%s\"%str(e))\n\n cycle = {\n 1: 10,\n 2: 30,\n 3: 60,\n 4: 90\n }\n\n # 单期外包 Cycle.C2 overdue_day 31\n apps = (Application.select()\n .where(fn.DATE(Application.C2_entry) == date.today(),\n Application.type == ApplicationType.CASH_LOAN.value))\n\n partners = (Partner.select()\n .where(Partner.status == PartnerStatus.NORMAL.value,\n Partner.cycle == Cycle.C2.value))\n\n apps_ids = [a.id for a in apps]\n dispatch_inserts = []\n start_index = 0\n apps_length = len(apps_ids)\n logging.warning('apps length %s' % str(apps_length))\n\n for p in partners: # 目前就一个partner\n bombers = (Bomber.select()\n .where(Bomber.partner == p.id,\n Bomber.status != BomberStatus.OUTER_LEADER.value,\n Bomber.is_del == 0))\n\n gen = CycleIter([b.id for b in bombers])\n existing_list = []\n\n end_index = start_index + int(apps_length * p.app_percentage)\n logging.info('partner length %s' % str(end_index))\n\n if not apps_ids[start_index:end_index]:\n continue\n bills = BillService().bill_list(\n application_ids=apps_ids[start_index:end_index])\n bill_dict = {bill['application_id']: bill for bill in bills}\n\n for a_id in apps_ids[start_index:end_index]:\n bomber = average_gen(gen, existing_list)\n q = (DispatchApp.delete()\n .where(DispatchApp.application == a_id)\n .execute())\n dispatch_inserts.append({\n 'application': a_id,\n 'bomber': bomber,\n 'partner': p.id,\n })\n\n # 件分给外包后,对数据进行备份以备数据分析\n application = (Application.select()\n .where(Application.id == a_id)).first()\n application.latest_bomber = bomber\n application.status = ApplicationStatus.AB_TEST.value\n application.ptp_bomber = None\n application.save()\n day_next_cycle = (cycle.get(application.cycle) -\n application.overdue_days)\n DispatchAppHistory.create(\n application=a_id,\n partner_id=p.id,\n bomber_id=bomber,\n entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=(\n application.amount -\n Decimal(bill_dict[a_id].get('principal_paid'))),\n entry_late_fee_pending=(\n Decimal(bill_dict[a_id].get('late_fee')) -\n Decimal(bill_dict[a_id].get('late_fee_paid'))),\n expected_out_time=(date.today() +\n timedelta(days=day_next_cycle))\n )\n\n start_index = end_index\n\n with db.atomic():\n for idx in range(0, len(dispatch_inserts), 100):\n DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n\n # AB test 分件(人工维护分件)\n\n config = SystemConfig.prefetch(SCI.AB_TEST_C2)\n c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)\n # 余下的单期件分给内部指定催收员id [76, 100, 106, 107, 213, 215, 216, 221, 222, 223, 226, 235]\n c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)\n #python库的application id\n c2 = 
apps_ids[start_index:]\n if c2:\n bills = BillService().bill_list(\n application_ids=c2)\n else:\n bills = []\n #java库的bill\n bill_dict = {bill['application_id']: bill for bill in bills}\n logging.info('c2 AB_test length: %s' % str(c2))\n gen = CycleIter(c2_bomber)\n existing_list = []\n for c in c2:\n bomber = average_gen(gen, existing_list)\n application = Application.filter(Application.id == c).first()\n application.status = ApplicationStatus.AB_TEST.value\n application.latest_bomber = bomber\n application.ptp_bomber = None\n application.save()\n\n day_next_cycle = 46 - application.overdue_days\n DispatchAppHistory.create(\n application=c,\n bomber_id=bomber,\n entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=(application.amount\n - bill_dict[c].get('principal_paid', 0)),\n entry_late_fee_pending=(\n bill_dict[c].get('late_fee', 0) -\n bill_dict[c].get('late_fee_paid', 0)),\n expected_out_time=(date.today() + timedelta(days=day_next_cycle))\n )\n ab_test_other()\n\n\n\n# 单期的件部分分给外包,内部的C1a 不用分件进入自动外呼\ndef c1a_dispatch_app():\n today = datetime.today().date()\n tomorrow = today + timedelta(days=1)\n #获取单期的件\n c1a_apps = (Application.select()\n .where(Application.status << [ApplicationStatus.UNCLAIMED.value,\n ApplicationStatus.PROCESSING.value],\n Application.dpd1_entry >= today,\n Application.dpd1_entry < tomorrow,\n Application.type == ApplicationType.CASH_LOAN.value))\n all_aids = [a.id for a in c1a_apps]\n # 获取外包部门\n partners = (Partner.select()\n .where(Partner.status == PartnerStatus.NORMAL.value,\n Partner.cycle == Cycle.C1A.value))\n end = 0\n for p in partners:\n #直接通过partner 获取bomber\n bombers = (Bomber.select()\n .where(Bomber.partner == p.id,\n Bomber.is_del == 0))\n start = end\n end += int(len(all_aids) * p.app_percentage)\n aids = all_aids[start:end]\n bids = [b.id for b in bombers]\n if not bids or not aids:\n continue\n # 获取每个外包应该分到的件的个数\n average_number = get_average_number(len(aids),len(bids))\n p_end = 0\n for i,bid in enumerate(bids):\n p_start = p_end\n p_end += average_number[i]\n b_aids = aids[p_start:p_end]\n with db.atomic():\n q = (Application\n .update(latest_bomber = bid,\n status = ApplicationStatus.AB_TEST.value)\n .where(Application.id << b_aids)\n .execute())\n params = {\n \"cycle\": Cycle.C1A.value,\n \"dest_partner_id\": p.id,\n \"application_ids\": b_aids,\n \"dest_bomber_id\": bid\n }\n new_in_record(**params)\n try:\n dispatch_inserts = []\n for aid in b_aids:\n dispatch_inserts.append({'application': aid,\n 'bomber': bid,\n 'partner': p.id,\n 'status': DisAppStatus.NORMAL.value})\n if dispatch_inserts:\n q = (DispatchApp.insert_many(dispatch_inserts).execute())\n except Exception as e:\n logging.error(\"c1a分件写入dispatch_app error:%s\"%str(e))\n\n\ndef ab_test_other():\n cycle_upper = {\n 1: 10,\n 2: 30,\n 3: 60,\n 4: 76\n }\n\n c1b = (Application.select()\n .where(fn.DATE(Application.C1B_entry) == date.today(),\n Application.type == ApplicationType.CASH_LOAN.value)\n .order_by(-Application.overdue_days)\n )\n c1b_id = [a.id for a in c1b]\n\n dis_app_update = (DispatchApp.update(status=DisAppStatus.ABNORMAL.value)\n .where(DispatchApp.application.in_(c1b_id)))\n dis_app_update.execute()\n\n c3 = (Application.select()\n .where(fn.DATE(Application.C3_entry) == date.today(),\n Application.type == ApplicationType.CASH_LOAN.value))\n all_id = [b.id for b in c3]\n\n try:\n # 将C3的件一部分分配给外包\n partners = (Partner.select()\n .where(Partner.status == PartnerStatus.NORMAL.value,\n Partner.cycle == 
Cycle.C3.value))\n\n start_index, end_index, out_apps = 0, 0, {}\n for p in partners:\n end_index += int(len(all_id) * p.app_percentage)\n out_apps[p.id] = all_id[start_index:end_index]\n start_index = end_index\n c3_id = all_id[end_index:]\n allot_c3_case(out_apps)\n except:\n c3_id = all_id\n\n config = SystemConfig.prefetch(SCI.AB_TEST_C1B, SCI.AB_TEST_C3)\n c1b_bomber = config.get(SCI.AB_TEST_C1B, SCI.AB_TEST_C1B.default_value)\n c3_bomber = config.get(SCI.AB_TEST_C3, SCI.AB_TEST_C3.default_value)\n # 过滤掉催分期的催收员\n c3_bomber = get_cash_bomber(c3_bomber, Cycle.C3.value)\n data = [{'ids': c1b_id, 'bomber': c1b_bomber, 'index': 0, 'cycle': 2},\n {'ids': c3_id, 'bomber': c3_bomber, 'index': 1, 'cycle': 4}]\n\n for d in data:\n applications = d.get('ids')\n length = len(applications)\n end = int(length * d.get('index'))\n gen = CycleIter(d.get('bomber'))\n existing_list = []\n if not applications:\n continue\n bills = BillService().bill_list(\n application_ids=applications)\n bill_dict = {bill['application_id']: bill for bill in bills}\n for a in applications[:end]:\n bomber = average_gen(gen, existing_list)\n application = Application.filter(Application.id == a).first()\n application.status = ApplicationStatus.AB_TEST.value\n application.latest_bomber = bomber\n application.ptp_bomber = None\n application.save()\n\n day_next_cycle = (cycle_upper.get(application.cycle) -\n application.overdue_days)\n DispatchAppHistory.create(\n application=a,\n bomber_id=bomber,\n entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n\n entry_principal_pending=(application.amount -\n bill_dict[a]['principal_paid']),\n entry_late_fee_pending=(bill_dict[a]['late_fee'] -\n bill_dict[a]['late_fee_paid']),\n expected_out_time=(date.today() +\n timedelta(days=day_next_cycle))\n )\n\n # 根据partner表中的配置给外包团队分件。\n if d.get('cycle') == Cycle.C1B.value:\n c1b_wb_partner = (Partner.select()\n .where(Partner.cycle == Cycle.C1B.value,\n Partner.status ==\n PartnerStatus.NORMAL.value))\n # 获取c1b外包团队\n c1b_wb_p_dict = { str(p.id):p.app_percentage for p in c1b_wb_partner}\n c1b_wb_pids = list(map(int, c1b_wb_p_dict.keys()))\n c1b_wb_bombers = (Bomber.select()\n .where(Bomber.is_del == 0,\n Bomber.partner_id << c1b_wb_pids,\n Bomber.password.is_null(False)))\n # 获取每个外包团队的成员和团队应分的件数\n c1b_wb_pba = {}\n apps_num = len(applications)\n for cb in c1b_wb_bombers:\n cb_key = str(cb.partner_id)\n if cb_key in c1b_wb_pba:\n c1b_wb_pba[cb_key][\"bids\"].append(cb.id)\n else:\n # 获取比例,计算分配给外包的件的个数\n start = end\n percentage = c1b_wb_p_dict.get(cb_key, 0)\n end = start + ceil(apps_num * percentage)\n c1b_wb_pba[cb_key] = {\n \"bids\": [cb.id],\n \"pid\": cb.partner_id,\n \"apps\": applications[start:end]\n }\n # 获取现金贷c1b新件剩余的件\n inner_c1b_apps = applications[end:]\n dispatch_c1b_inner_apps(aids=inner_c1b_apps,\n bills=bill_dict,\n period=cycle_upper.get(Cycle.C1B.value))\n for pid,c1b_wb in c1b_wb_pba.items():\n c1b_wb_apps = c1b_wb[\"apps\"]\n c1b_wb_bids = c1b_wb[\"bids\"]\n average_nums = get_average_number(len(c1b_wb_apps),\n len(c1b_wb_bids))\n bid_end = 0\n for b_index,bid in enumerate(c1b_wb_bids):\n bid_start = bid_end\n bid_end = bid_start + average_nums[b_index]\n bid_apps = c1b_wb_apps[bid_start:bid_end]\n logging.info(\"c1b_分件:bid:%s,bid_apps:%s\"%(bid, bid_apps))\n with db.atomic():\n app_sql = (Application.update(latest_bomber=bid,\n status=ApplicationStatus.AB_TEST.value,\n ptp_bomber=None)\n .where(Application.id << bid_apps))\n app_sql.execute()\n params = {\n \"apps\":bid_apps,\n \"partner_id\": 
int(pid),\n \"bill_dict\": bill_dict,\n \"period\": cycle_upper.get(Cycle.C1B.value),\n \"bomber_id\":bid\n }\n c1b_dispatch_in_record(**params)\n try:\n for aid in bid_apps:\n dispatch_inserts = {\n 'application': aid,\n 'bomber': bid,\n 'partner': int(pid),\n 'status': DisAppStatus.NORMAL.value,\n }\n q = (DispatchApp.update(**dispatch_inserts)\n .where(DispatchApp.application == aid)\n .execute())\n if not q:\n DispatchApp.create(**dispatch_inserts)\n except Exception as e:\n logging.error(\"dispatchApp插入失败:%s\"%str(e))\n\n\ndef allot_c3_case(out_data):\n dispatch_inserts = []\n for key, value in out_data.items():\n if not value:\n continue\n\n bombers = (Bomber\n .filter(Bomber.partner == key,\n Bomber.status == BomberStatus.OUTER.value,\n Bomber.is_del == 0))\n bomber_ids = [b.id for b in bombers]\n bomber = CycleIter(bomber_ids)\n bills = BillService().bill_list(application_ids=value)\n bill_dict = {bill['application_id']: bill for bill in bills}\n\n for v in value:\n bomber_id = bomber.__next__()\n q = (DispatchApp.delete()\n .where(DispatchApp.application == v)\n .execute())\n dispatch_inserts.append({\n 'application': v,\n 'bomber': bomber_id,\n 'partner': key,\n })\n\n # 对数据进行备份以备数据分析\n application = (Application.filter(Application.id == v)).first()\n application.latest_bomber = bomber_id\n application.ptp_bomber = None\n application.status = ApplicationStatus.AB_TEST.value\n application.save()\n\n # c3进入下一个cycle时逾期天数为90天\n day_next_cycle = (90 - application.overdue_days)\n DispatchAppHistory.create(\n application=v,\n partner_id=key,\n bomber_id=bomber_id,\n entry_at=datetime.now(),\n entry_overdue_days=application.overdue_days,\n entry_principal_pending=(\n application.amount -\n Decimal(bill_dict[v].get('principal_paid'))),\n entry_late_fee_pending=(\n Decimal(bill_dict[v].get('late_fee')) -\n Decimal(bill_dict[v].get('late_fee_paid'))),\n expected_out_time=(\n date.today() + timedelta(days=day_next_cycle))\n )\n\n with db.atomic():\n for idx in range(0, len(dispatch_inserts), 100):\n DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()\n\n# 获取只催单期的催收员\ndef get_cash_bomber(bids, cycle):\n cash_bombers = (Bomber.select()\n .where(Bomber.id << bids,\n Bomber.is_del == 0,\n Bomber.instalment != cycle))\n cash_bids = [b.id for b in cash_bombers]\n return cash_bids\n\n# c1b 单期的件分件给内部员工\ndef dispatch_c1b_inner_apps(aids, bills, period=30):\n # 获取需要分件的员工\n bombers = (Bomber.select()\n .where(Bomber.role_id == 5,\n Bomber.is_del == 0,\n Bomber.instalment == 0))\n bids = [b.id for b in bombers]\n if not aids or not bids:\n return\n avg_num = get_average_number(len(aids),len(bids))\n end = 0\n with db.atomic():\n for index,b in enumerate(bids):\n start = end\n end = start + avg_num[index]\n b_aids = aids[start:end]\n app_sql = (Application.update(latest_bomber=b,\n status=ApplicationStatus.AB_TEST.value,\n ptp_bomber=None)\n .where(Application.id << b_aids))\n app_sql.execute()\n params = {\n \"apps\": b_aids,\n \"bill_dict\": bills,\n \"period\": period,\n \"bomber_id\": b\n }\n c1b_dispatch_in_record(**params)\n\n# 将分期的件分配给员工\ndef dispatch_instalment_app():\n\n cycle_list = [Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value,Cycle.M3.value]\n # 获取每天,获取每个cycle没有分出去的件\n for cycle in cycle_list:\n apps = (Application.select()\n .where(Application.cycle == cycle,\n Application.latest_bomber.is_null(True),\n Application.status != ApplicationStatus.REPAID.value,\n (Application.type ==\n ApplicationType.CASH_LOAN_STAGING.value)))\n aids = [a.id for a in apps]\n if not aids:\n 
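            # No unassigned instalment applications in this cycle, so there
            # is nothing to dispatch; move on to the next cycle.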
continue\n # 获取指定的bomber\n bombers = (Bomber.select()\n .where(Bomber.is_del == 0,\n Bomber.instalment == cycle))\n bids = [b.id for b in bombers]\n if not bids:\n continue\n average_nums = get_average_number(len(apps),len(bids))\n end = 0\n for i,bid in enumerate(bids):\n start = end\n end = start + average_nums[i]\n bid_apps = aids[start:end]\n with db.atomic():\n # 更新状态\n q = (Application.update(ptp_bomber = None,\n latest_bomber = bid, #最新的催收员id\n last_bomber = Application.latest_bomber,#前一接收的催收员\n status = ApplicationStatus.AB_TEST.value)#人工维护的件\n .where(Application.id << bid_apps)\n .execute())\n record_param = {\"cycle\": cycle,\n \"application_ids\": bid_apps,\n \"dest_bomber_id\": bid}\n out_and_in_record_instalment(**record_param)\n\n\n# 分期的入案和出案\ndef out_and_in_record_instalment(**kwargs):\n if not kwargs.get(\"application_ids\"):\n return\n # 先出案\n out_q = (DispatchAppHistory.update(out_at = fn.NOW())\n .where(DispatchAppHistory.application << kwargs['application_ids'],\n DispatchAppHistory.out_at.is_null(True))\n .execute())\n # 入案\n cycle_period = {\n 1: '10',\n 2: '30',\n 3: '60',\n 4: '90'\n }\n period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')\n kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'\n subquery = (Application\n .select(Application.amount,\n fn.NOW().alias('created_at'),\n fn.NOW().alias('updated_at'),\n Application.id.alias('application_id'),\n R(str(kwargs['dest_bomber_id'])).alias('bomber_id'),\n fn.NOW().alias('entry_at'),\n Application.overdue_days.alias('entry_overdue_days'),\n R(str(kwargs['dest_partner_id'])).alias('partner_id'),\n (SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'\n ' t1.overdue_days) DAY)' % period))\n .alias('expected_out_time'))\n .where(Application.status != ApplicationStatus.REPAID.value,\n Application.id << kwargs['application_ids']))\n application_list = list(subquery)\n for idx in range(0, len(application_list), 50):\n applications = application_list[idx:idx + 50]\n app_ids = [i.application_id for i in applications]\n # 获取所有的overdue_bill\n overdue_bills = (OverdueBill.select()\n .where(OverdueBill.collection_id << app_ids))\n sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]\n bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)\n insert_args = lambad_instalment_result(bill_list, applications)\n if not insert_args:\n continue\n DispatchAppHistory.insert_many(insert_args).execute()\n\n#分期入案结果格式化\ndef lambad_instalment_result(bill_list,applications):\n bill_dict = {}\n insert_args = []\n # 计算入案金额\n for sub_bill in bill_list:\n bill_id = sub_bill[\"bill_id\"]\n principal_pending = sub_bill[\"amount\"] - sub_bill['principal_paid']\n late_fee_pending = sub_bill[\"late_fee\"] - sub_bill[\"late_fee_paid\"]\n if bill_id in bill_dict:\n bill_dict[bill_id][\"entry_principal_pending\"] += principal_pending\n bill_dict[bill_id][\"entry_late_fee_pending\"] += late_fee_pending\n else:\n bill_dict[bill_id] = {\n \"entry_principal_pending\": principal_pending,\n \"entry_late_fee_pending\": late_fee_pending\n }\n\n for app in applications:\n bill_entry = bill_dict.get(app.bill_id, {})\n entry_principal_pending = bill_entry.get(\"entry_principal_pending\", 0)\n entry_late_fee_pending = bill_entry.get(\"entry_late_fee_pending\", 0)\n insert_dict = {\n 'created_at': app.created_at,\n 'updated_at': app.updated_at,\n 'application': app.application_id,\n 'bomber_id': app.bomber_id,\n 'entry_at': app.entry_at,\n 'entry_overdue_days': app.entry_overdue_days,\n 'partner_id': app.partner_id,\n 
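            # The entry_* fields record how much was still owed when the
            # application entered this bomber's queue; the amounts were
            # summed per bill in bill_dict above.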
'expected_out_time': app.expected_out_time,\n 'entry_principal_pending': entry_principal_pending,\n 'entry_late_fee_pending': entry_late_fee_pending\n }\n insert_args.append(insert_dict)\n return insert_args\n\n\ndef c1b_dispatch_in_record(**kwargs):\n app_ids = kwargs.get(\"apps\")\n partner_id = kwargs.get(\"partner_id\",\"null\")\n bill_dict = kwargs.get(\"bill_dict\")\n period = kwargs.get(\"period\")\n bomber_id = kwargs.get('bomber_id')\n if not all([app_ids, partner_id, bill_dict, period]):\n return False\n bill_dict = { str(k):v for k,v in bill_dict.items()}\n subquery = (Application\n .select(Application.amount,\n fn.NOW().alias('created_at'),\n fn.NOW().alias('updated_at'),\n Application.id.alias('application_id'),\n R(str(bomber_id)).alias('bomber_id'),\n fn.NOW().alias('entry_at'),\n Application.overdue_days.alias('entry_overdue_days'),\n R(str(partner_id)).alias('partner_id'),\n (SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'\n ' t1.overdue_days) DAY)' % period))\n .alias('expected_out_time'))\n .where(Application.id << app_ids))\n application_list = list(subquery)\n for idx in range(0,len(application_list),1000):\n applications = application_list[idx:idx+1000]\n insert_args = list(map(partial(lambda_result,\n dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n\n#获取联系的电话号码\n@action(MessageAction.BOMBER_AUTO_CALL_CONTACT)\ndef bomber_auto_call_contact(payload, msg_id):\n application_list = payload['application_list']\n applications = []\n for app_id in application_list:\n applications.append(Application.filter(Application.id == app_id)\n .first())\n # 得到每个件的联系人队列\n with db.atomic():\n for application in applications:\n cycle = application.cycle\n # 修改查询时的条件\n contacts = (\n Contact\n .select()\n .where(Contact.user_id == application.user_id,\n Contact.latest_status.not_in(ContactStatus.no_use()))\n .order_by(-Contact.useful,\n Contact.relationship,\n -Contact.total_duration,\n -Contact.total_count)\n )\n\n level1 = []\n level2 = []\n level3 = []\n level = []\n for c in contacts:\n if c.relationship == Relationship.APPLICANT.value:\n level.append(c)\n elif c.relationship == Relationship.FAMILY.value:\n level1.append(c)\n elif c.relationship == Relationship.COMPANY.value:\n level2.append(c)\n elif c.relationship == Relationship.SUGGESTED.value:\n level3.append(c)\n\n contacts = level + level2 + level1 + level3\n\n numbers = []\n fc_count = 0\n\n # Pre-check if need phone calls,校验手机号是否可以拨通\n app_calls = []\n need_verify = False\n for eac_contact in contacts:\n if (eac_contact.relationship == Relationship.FAMILY.value and\n eac_contact.useful == ContactsUseful.NONE.value):\n need_verify = True\n break\n\n if need_verify:\n logging.info('Found contact need update. 
app id {}'\n .format(str(application.id)))\n app_calls = AuditService().phone_invalid(cat=Relationship(1).name,\n application_id=application.external_id)\n\n call_history = True\n c1b_family_dict = defaultdict(list)\n for c in contacts:\n if c.relationship == Relationship.COMPANY.value:\n if cycle == Cycle.C1A.value:\n call_history = check_call_history(application)\n break\n if cycle == Cycle.C1B.value:\n # 暂时c1b公司只打本人填写的电话\n if c.source != CompanyContactType.BASIC_INFO_JOB_TEL.value:\n continue\n if c.relationship == Relationship.FAMILY.value:\n if cycle == Cycle.C1A.value:\n call_history = check_call_history(application)\n break\n\n # Update contact useful\n if c.useful == ContactsUseful.NONE.value:\n c.useful = check_valid_phone(app_calls, c)\n c.save()\n\n if c.useful == ContactsUseful.INVALID.value:\n logging.info('Found invalid contact. {}'\n .format(str(c.id)))\n continue\n\n # 需要对family类进行排序\n if cycle == Cycle.C1B.value:\n c1b_family_dict[c.source].append(c.number)\n continue\n if c.relationship == Relationship.SUGGESTED.value:\n if cycle not in (Cycle.C2.value, Cycle.C3.value):\n break\n if cycle == Cycle.C2.value and fc_count > 10:\n break\n if cycle == Cycle.C3.value and fc_count > 20:\n break\n fc_count += 1\n numbers.append(c.number)\n\n # if cycle1 applicant is in no_use add ec\n if len(numbers) == 0 or not call_history:\n src_contact = (\n Contact.select()\n .where(Contact.user_id == application.user_id,\n Contact.source in FamilyContactType.c1a_order()))\n\n # C1A五天内催收电话没打通,按新的顺序拨打;由原来的2种变更为4种\n c1a_family_dict = defaultdict(list)\n for e in src_contact:\n c1a_family_dict[e.source].append(e.number)\n\n for call_type in FamilyContactType.c1a_order():\n numbers.extend(c1a_family_dict[call_type])\n\n if cycle == Cycle.C1B.value:\n for call_type in FamilyContactType.c1b_order():\n numbers.extend(c1b_family_dict[call_type])\n\n numbers = list(set(numbers))\n update_query = (\n AutoCallList\n .update(numbers=','.join(numbers))\n .where(AutoCallList.application == application.id)\n )\n update_query.execute()\n\n\ndef check_valid_phone(phone_list, contact):\n useful = ContactsUseful.AVAILABLE.value\n for each_phone in phone_list:\n if contact.number == each_phone.get('tel_no') or \\\n contact.number == each_phone.get('mobile_no'):\n useful = ContactsUseful.INVALID.value\n break\n return useful\n\n# c1a的件如果5天之内没有接通,开放ec\ndef check_call_history(application):\n app_create_at = application.created_at + timedelta(days=4)\n if datetime.today().date() > app_create_at.date():\n call_actions = (CallActions.select()\n .where(CallActions.type == 0,\n CallActions.application == application.id,\n CallActions.created_at >\n (datetime.now() - timedelta(days=5))))\n for call in call_actions:\n if call.phone_status == PhoneStatus.CONNECTED.value:\n return True\n return False\n return True\n\n\n#当前时间与更新时间间隔超过 SCAVENGER_TIME 时间时,SCAVENGER更新状态\n@action(MessageAction.BOMBER_SCAVENGER)\ndef scavenger(payload, msg_id):\n scavenger_time = -60\n scavenger = (SystemConfig.select()\n .where(SystemConfig.key == 'SCAVENGER_TIME')\n .first())\n if scavenger and scavenger.value.isdigit():\n scavenger_time = -int(scavenger.value)\n update_auto_call_list = (\n AutoCallList\n .update(status=AutoListStatus.PENDING.value,\n description='scavenger')\n .where(\n AutoCallList.status == AutoListStatus.PROCESSING.value,\n AutoCallList.updated_at <\n datetime.now() + timedelta(minutes=scavenger_time),\n )\n )\n count = update_auto_call_list.execute()\n logging.info('scavenger processed %s application', count)\n\n # 
更新自动外呼中状态是邮箱的件的状态\n mail_box_scavenger_time = -30\n mail_box_scavenger = (SystemConfig.select()\n .where(SystemConfig.key == 'MAIL_BOX_SCAVENGER_TIME')\n .first())\n if mail_box_scavenger and mail_box_scavenger.value.isdigit():\n mail_box_scavenger_time = -int(mail_box_scavenger.value)\n update_mail_box_call_list = (\n AutoCallList.update(status=AutoListStatus.PENDING.value)\n .where(AutoCallList.status == AutoListStatus.MAILBOX.value,\n AutoCallList.updated_at <\n datetime.now() + timedelta(minutes=mail_box_scavenger_time))\n )\n mail_box_count = update_mail_box_call_list.execute()\n logging.info(\"scavenger update mail box %s\", mail_box_count)\n\n # ivr中30分钟没有接收到回调,修改ivr中的状态\n update_auto_ivr = (\n AutoIVR\n .update(status=AutoIVRStatus.AVAILABLE.value)\n .where(AutoIVR.status == AutoIVRStatus.PROCESSING.value,\n AutoIVR.updated_at < datetime.now() + timedelta(minutes=-30)\n )\n )\n ivr_result = update_auto_ivr.execute()\n logging.info(\"scavenger update %s ivr\"%ivr_result)\n\n\n@action(MessageAction.BOMBER_CLEAR_OVERDUE_PTP)\ndef bomber_clear_overdue_ptp(payload, msg_id):\n # 对于C1B, C2 和 C3 不存在预测试呼出,故其ptp清除后需回到外包或ab_test\n #C1B, C2,C3 件,当前时间超过承诺还款时间时,转为人工维护\n update_overdue_ptp_ab = (\n Application.update(\n status=ApplicationStatus.AB_TEST.value,\n ).where(\n fn.DATE(Application.promised_date) < datetime.today().date(),\n Application.status == ApplicationStatus.PROCESSING.value,\n Application.cycle << [Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value]\n )\n )\n count1 = update_overdue_ptp_ab.execute()\n logging.info('bomber overdue ptp for C1B C2 and C3 cleared: %s', count1)\n\n now_and_yesterday = ((datetime.today() + timedelta(days=1)).date(),\n datetime.today().date())\n overdue_1a1b_cs_ptp = (CallActions\n .select()\n .where(fn.DATE(CallActions.promised_date)\n .in_(now_and_yesterday),\n CallActions.bomber_id == 72))\n update_overdue_1a1b_cs_ptp = (\n Application\n .update(status=ApplicationStatus.UNCLAIMED.value)\n .where(Application.status == ApplicationStatus.PROCESSING.value,\n Application.cycle == Cycle.C1A.value,\n Application.id.in_(overdue_1a1b_cs_ptp)))\n\n logging.debug(\"bomber c1a c1b cs ptp: %s\", update_overdue_1a1b_cs_ptp)\n count2 = update_overdue_1a1b_cs_ptp.execute()\n logging.info('bomber c1a c1b cs overdue ptp cleared: %s', count2)\n\n update_overdue_ptp = (\n Application\n .update(\n status=ApplicationStatus.UNCLAIMED.value,\n ).where(\n fn.DATE(Application.promised_date) < datetime.today().date(),\n Application.status == ApplicationStatus.PROCESSING.value,\n Application.cycle == Cycle.C1A.value,\n )\n )\n count = update_overdue_ptp.execute()\n logging.info('bomber overdue ptp cleared: %s', count)\n\n\n@action(MessageAction.REPORT_BOMBER_COLLECTION)\ndef report_bomber_collection(payload, msg_id):\n start_date = (ReportCollection\n .select(fn.MAX(ReportCollection.apply_date))\n .scalar())\n now = datetime.now()\n if start_date and str(start_date) == str(now)[:10]:\n return\n end_date = str(now + timedelta(days=1))[:10]\n start_date = str(now)[:10]\n\n dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))\n all_overdue_loan_sql1 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n GROUP BY 1;\n \"\"\"\n s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()\n d1 = OperatedDict(s_data1)\n\n all_overdue_loan_sql2 = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE DATE(ba.follow_up_date) > CURDATE()\n AND ba.called_counts = 0\n GROUP BY 1;\n \"\"\"\n s_data2 = 
readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()\n d2 = OperatedDict(s_data2)\n\n overdue_loans_entered_into_predict_call_system_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.id)\n FROM bomber.auto_call_list ba\n WHERE ba.called_counts >= 1\n GROUP BY 1;\n \"\"\"\n s_data3 = readonly_db.execute_sql(\n overdue_loans_entered_into_predict_call_system_sql).fetchall()\n d3 = OperatedDict(s_data3)\n\n loans_completed_sql = \"\"\"\n SELECT ba.cycle, COUNT(DISTINCT ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()\n d4 = OperatedDict(s_data4)\n\n connected_calls_automatic_sql = \"\"\"\n SELECT ba.cycle, COUNT(ba.application_id)\n FROM bomber.auto_call_actions ba\n WHERE DATE(ba.created_at) = CURDATE()\n GROUP BY 1;\n \"\"\"\n s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()\n d5 = OperatedDict(s_data5)\n\n connected_calls_manual_sql = \"\"\"\n SELECT bb.cycle, COUNT(bb.id)\n FROM bomber.bombing_history bb\n WHERE DATE(bb.created_at) = curdate()\n AND (bb.bomber_id < 150 OR bb.bomber_id > 200)\n GROUP BY bb.cycle;\n \"\"\"\n s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()\n d6 = OperatedDict(s_data6)\n\n logging.info('Directly get data from database successfully.')\n\n c1 = d1 - d2\n c2 = d3\n c3 = c2 / c1\n c4 = d4\n c5 = c4 / c2\n c6 = d5\n c7 = c6 / c4\n c8 = d6\n c9 = OperatedDict(get_agent())\n c10 = (c6 + c8) / c9\n try:\n c11 = average_call_duration_team(start_date, end_date)\n except AttributeError:\n c11 = {}\n lst = []\n for i in range(1, 5):\n lst.append({\n 'apply_date': start_date,\n 'cycle': dct[i],\n 'all_overdue_loan': c1.get(i, 0),\n 'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),\n 'of_overdue_loans_entered_into_predict_call_system':\n round(c3.get(i, 0) * 100, 1),\n 'loans_completed': c4.get(i, 0),\n 'of_completed_loans_in_predict_call_system':\n round(c5.get(i, 0) * 100, 1),\n 'connected_calls_automatic': c6.get(i, 0),\n 'connected_calls_automatic_completed_loans':\n round(c7.get(i, 0), 1),\n 'connected_calls_manual': c8.get(i, 0),\n 'agent': c9.get(i, 0),\n 'average_calls_agent': round(c10.get(i, 0), 1),\n 'average_call_duration_team': round(c11.get(i, 0), 1)\n })\n ReportCollection.insert_many(lst).execute()\n\n logging.info('report_bomber_collection:Done!')\n\n\n@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)\ndef bomber_auto_call_list_record(payload, msg_id):\n \"\"\"记录一年的auto_call_list,删除前一天的数据,增加今天的数据\"\"\"\n now = datetime.now()\n if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):\n date_sql = \"\"\"\n SELECT DATE(created_at) FROM auto_call_list_record\n GROUP BY DATE(created_at) limit 1\n \"\"\"\n del_date = db.execute_sql(date_sql).fetchone()[0]\n del_sql = \"\"\"\n DELETE FROM auto_call_list_record WHERE date(created_at) = %s\n \"\"\"\n db.execute_sql(del_sql, [del_date])\n sql = \"\"\"\n INSERT INTO auto_call_list_record\n SELECT * FROM auto_call_list\n \"\"\"\n db.execute_sql(sql)\n logging.info(\"bomber_auto_call_list_record done\")\n\n\n@action(MessageAction.BOMBER_MANUAL_CALL_LIST)\ndef bomber_manual_call_list(payload, msg_id):\n \"\"\"\n 手动分件主要依赖\n\n :param payload:\n :param msg_id:\n :return:\n \"\"\"\n batch_id = payload.get('batch_id')\n if batch_id is None:\n logging.warning('Invalid batch id')\n return\n query = (ManualCallList\n .select()\n .where(ManualCallList.batch_id == batch_id,\n ManualCallList.status << ManualCallListStatus.available()))\n 
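    # Each ManualCallList row describes one transfer batch: applications
    # move from src_bomber_id to dest_bomber_id, optionally rewriting cycle,
    # status and the dispatch_app rows, inside one transaction per row.
    # A sketch of the expected src_params / dest_params payloads (keys as
    # consumed below, values hypothetical):
    #   src_params  = '{"cycle": 2, "status": 4}'
    #   dest_params = '{"cycle": 2}'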
if not query.exists():\n logging.warning('Empty application id list')\n return\n\n for q in query:\n application_ids = json.loads(q.application_ids or '[]')\n\n # where\n cycle = 0\n where_list = [(Application.id << application_ids),\n Application.latest_bomber_id == q.src_bomber_id]\n src_params = json.loads(q.src_params or '{}')\n if \"cycle\" in src_params:\n where_list.append(Application.cycle == src_params['cycle'])\n cycle = src_params['cycle']\n if \"status\" in src_params:\n where_list.append(Application.status == src_params['status'])\n\n # update\n update_dict = {'latest_bomber': q.dest_bomber_id}\n dest_params = json.loads(q.dest_params or '{}')\n if \"cycle\" in dest_params:\n update_dict['cycle'] = dest_params['cycle']\n cycle = dest_params['cycle']\n if \"status\" in dest_params:\n update_dict['status'] = dest_params['status']\n\n with db.atomic():\n try:\n # update dispatch_app\n if q.update_dispatch_app:\n if q.dest_partner_id is None:\n raise ValueError('unallowed operation')\n (DispatchApp\n .delete()\n .where(DispatchApp.application_id.in_(application_ids))\n .execute())\n\n (DispatchApp\n .insert_many([{\n 'application': i,\n 'partner': q.dest_partner_id,\n 'bomber': q.dest_bomber_id,\n 'status': DisAppStatus.NORMAL.value}\n for i in application_ids])\n .execute())\n \n application_success_row = (\n Application\n .update(**update_dict)\n .where(*where_list)\n .execute()\n )\n if application_success_row == 0:\n raise ValueError('Invalid parameter')\n\n (ManualCallList\n .update(\n status=ManualCallListStatus.SUCCESS.value,\n length=application_success_row)\n .where(ManualCallList.id == q.id)\n .execute())\n\n out_and_in_record(\n src_bomber_id=q.src_bomber_id,\n application_ids=application_ids,\n dest_partner_id=q.dest_partner_id,\n dest_bomber_id=q.dest_bomber_id,\n cycle=cycle\n )\n except Exception:\n db.rollback()\n (ManualCallList\n .update(\n status=ManualCallListStatus.FAILED.value,\n length=0)\n .where(ManualCallList.id == q.id)\n .execute())\n logging.error(\"PRINT BOMBER_MANUAL_CALL_LIST ERROR:\\n%s\",\n traceback.format_exc())\n continue\n\n\ndef lambda_result(item, dct):\n a = str(item.application_id)\n entry_principal_pending = (Decimal(item.amount or 0) -\n dct[a]['principal_paid'])\n entry_late_fee_pending = dct[a]['late_fee'] - dct[a]['late_fee_paid']\n\n return {\n 'created_at': item.created_at,\n 'updated_at': item.updated_at,\n 'application': a,\n 'bomber_id': item.bomber_id,\n 'entry_at': item.entry_at,\n 'entry_overdue_days': item.entry_overdue_days,\n 'partner_id': item.partner_id,\n 'expected_out_time': item.expected_out_time,\n 'entry_principal_pending': entry_principal_pending,\n 'entry_late_fee_pending': entry_late_fee_pending\n }\n\n\ndef out_and_in_record(**kwargs):\n \"\"\"\n 件在催收系统的出案和入案\n \"\"\"\n new_out_record(**kwargs)\n new_in_record(**kwargs)\n\ndef new_out_record(**kwargs):\n if not kwargs['application_ids']:\n return\n (DispatchAppHistory\n .update(out_at=fn.NOW())\n .where(DispatchAppHistory.bomber_id == kwargs['src_bomber_id'],\n DispatchAppHistory.application << kwargs['application_ids'],\n DispatchAppHistory.out_at.is_null(True))\n .execute())\n # 如果是月底分件,ptp_bomber不用置空\n if kwargs.get(\"month_dispatch\"):\n return\n # 出案时下p的件ptp_bomber置为空\n try:\n (Application.update(ptp_bomber=None)\n .where(Application.id << kwargs[\"application_ids\"])\n .execute())\n except Exception as e:\n logging.error(\"new_out_record error:aids:%s,error:%s\" %\n (kwargs[\"application_ids\"],str(e)))\n\ndef new_in_record(**kwargs):\n cycle_period = {\n 
1: '10',\n 2: '30',\n 3: '60',\n 4: '90'\n }\n period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')\n kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'\n subquery = (Application\n .select(Application.amount,\n fn.NOW().alias('created_at'),\n fn.NOW().alias('updated_at'),\n Application.id.alias('application_id'),\n R(str(kwargs['dest_bomber_id'])).alias('bomber_id'),\n fn.NOW().alias('entry_at'),\n Application.overdue_days.alias('entry_overdue_days'),\n R(str(kwargs['dest_partner_id'])).alias('partner_id'),\n (SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'\n ' t1.overdue_days) DAY)' % period))\n .alias('expected_out_time'))\n .where(Application.status != ApplicationStatus.REPAID.value,\n Application.id << kwargs['application_ids']))\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n app_ids = [i.application_id for i in applications]\n bill_list = BillService().bill_list(application_ids=app_ids)\n bill_dict = {str(bill['application_id']): bill for bill in bill_list}\n insert_args = list(map(partial(lambda_result,\n dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\n\ndef end_old_application(old_app, paid=False):\n if paid:\n if old_app.status == OldLoanStatus.WAITING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return\n if old_app.status == OldLoanStatus.PROCESSING.value:\n old_app.status = OldLoanStatus.PAID.value\n old_app.save()\n return old_app.application_id\n\n end_date = old_app.end_date\n now = datetime.now()\n if now >= max(end_date, old_app.promised_date or now):\n old_app.status = OldLoanStatus.FINISHED.value\n old_app.save()\n return old_app.application_id\n\n\n@action(MessageAction.UPDATE_OLD_LOAN_APPLICATION)\ndef update_old_loan_application(payload, msg_id):\n items = (Application\n .select(Application, OldLoanApplication)\n .join(OldLoanApplication,\n JOIN_INNER,\n on=(Application.id ==\n OldLoanApplication.application_id).alias('old_app'))\n .where(OldLoanApplication.status\n .in_(OldLoanStatus.available())))\n out_list = []\n for application in items:\n if application.overdue_days > 90:\n if application.old_app.status == OldLoanStatus.WAITING.value:\n start_old_application(application.old_app)\n else:\n out_list.append(application.old_app)\n\n success_list = [end_old_application(item) for item in out_list]\n app_ids = list(filter(None, success_list))\n\n if app_ids:\n bomber_id = SpecialBomber.OLD_APP_BOMBER.value\n out_record(src_bomber_id=bomber_id, application_ids=app_ids)\n\n\ndef in_record(**kwargs):\n \"\"\"\n :param kwargs: dist_partner_id, dist_bomber_id,\n expected_out_time, application_ids\n :return:\n \"\"\"\n # TODO: 入案记录统一\n kwargs['dist_partner_id'] = kwargs.get('dist_partner_id') or 'null'\n subquery = (Application\n .select(Application.amount,\n fn.NOW().alias('created_at'),\n fn.NOW().alias('updated_at'),\n Application.id.alias('application_id'),\n R(str(kwargs['dist_bomber_id'])).alias('bomber_id'),\n fn.NOW().alias('entry_at'),\n Application.overdue_days.alias('entry_overdue_days'),\n R(str(kwargs['dist_partner_id'])).alias('partner_id'),\n R('\"{}\"'.format(kwargs['expected_out_time']))\n .alias('expected_out_time'))\n .where(Application.status != ApplicationStatus.REPAID.value,\n Application.id << kwargs['application_ids']))\n application_list = list(subquery)\n for idx in range(0, len(application_list), 1000):\n applications = application_list[idx:idx + 1000]\n app_ids = 
[i.application_id for i in applications]\n bill_list = BillService().bill_list(application_ids=app_ids)\n bill_dict = {str(bill['application_id']): bill for bill in bill_list}\n insert_args = list(map(partial(lambda_result, dct=bill_dict),\n applications))\n DispatchAppHistory.insert_many(insert_args).execute()\n\n\ndef out_record(**kwargs):\n \"\"\"\n\n :param kwargs: src_bomber_id, application_ids\n :return:\n \"\"\"\n # TODO: 出案记录统一\n if not kwargs.get('application_ids'):\n return\n (DispatchAppHistory\n .update(out_at=fn.NOW())\n .where(DispatchAppHistory.bomber_id == kwargs['src_bomber_id'],\n DispatchAppHistory.application << kwargs['application_ids'])\n .execute())\n # 出案时下p的件ptp_bomber置为空\n try:\n (Application.update(ptp_bomber=None)\n .where(Application.id << kwargs[\"application_ids\"])\n .execute())\n except Exception as e:\n logging.error(\"out_record error:aids:%s,error:%s\" %\n (kwargs[\"application_ids\"], str(e)))\n\n\ndef start_old_application(old_app, cancel=False):\n application_id = old_app.application_id\n if cancel and (old_app.status == OldLoanStatus.PAID.value):\n now = datetime.now()\n if old_app.start_date is None:\n # 未进入500的池子里\n old_app.status = OldLoanStatus.WAITING.value\n elif now >= max(old_app.end_date, old_app.promised_date or now):\n # 撤销时用户已经从500的池子出去\n old_app.status = OldLoanStatus.FINISHED.value\n (DispatchAppHistory\n .update(out_at=max(old_app.end_date,\n old_app.promised_date or now))\n .where(DispatchAppHistory.bomber_id == old_app.bomber_id,\n DispatchAppHistory.application == application_id)\n .execute())\n else:\n # 还在500的池子\n old_app.status = OldLoanStatus.PROCESSING.value\n (DispatchAppHistory\n .update(out_at=None)\n .where(DispatchAppHistory.bomber_id == old_app.bomber_id,\n DispatchAppHistory.application == application_id)\n .execute())\n old_app.save()\n return\n\n application = (\n Application\n .get_or_none(Application.id == application_id,\n Application.status != ApplicationStatus.REPAID.value,\n Application.overdue_days > 90,\n Application.promised_date.is_null(True) |\n (fn.DATE(Application.promised_date) <\n datetime.today().date())))\n if not application:\n logging.error(\"Can not set old application %s to start collecting\",\n application_id)\n return\n\n if old_app.status in OldLoanStatus.no_available():\n logging.info(\"%s has finished or paid\", old_app.application_id)\n return\n\n config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)\n sp = config.get(SCI.OLD_APP_PERIOD,\n SCI.OLD_APP_PERIOD.default_value)\n old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value\n old_app.status = OldLoanStatus.PROCESSING.value\n old_app.bomber_id = old_app_bomber\n old_app.start_date = datetime.now()\n # 此处需要判断end_date是否已经被设置过\n if not old_app.end_date:\n old_app.end_date = datetime.now() + timedelta(days=sp)\n old_app.save()\n in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,\n application_ids=[old_app.application_id],\n expected_out_time=str(old_app.end_date))\n\n\n@action(MessageAction.OLD_LOAN_APPLICATION)\ndef old_loan_application(payload, msg_id):\n application_id = payload.get('application_id')\n numbers = payload.get('numbers', [])\n if not (application_id and numbers):\n logging.error(\"empty application id: %s, or invalid numbers: %s\",\n application_id, numbers)\n\n application = Application.get_or_none(Application.id == application_id)\n if (application and\n application.status == ApplicationStatus.REPAID.value):\n logging.error(\"application %s has paid\", application_id)\n return\n\n gold_eye = 
GoldenEye().get('/applications/%s' % application_id)\n if not gold_eye.ok:\n raise RuntimeError('Get golden eye user failed. {}'\n .format(str(application_id)))\n\n gold_app = gold_eye.json().get('data')\n user_id = gold_app['user_id']\n user_name = gold_app['id_name']\n\n # 通过bill获取账单类型,如果是分期的账单不关联OldloanApplication\n try:\n bill = BillService().bill_dict(application_id=application_id)\n except Exception:\n logging.error(\n 'application %s get bill info failed,old_loan_application',\n application_id)\n return\n\n source_contacts = (Contact\n .filter(Contact.user_id == user_id,\n Contact.relationship ==\n Relationship.APPLICANT.value,\n Contact.source ==\n ApplicantSource.NEW_APPLICANT.value))\n source_contact_set = {i.number for i in source_contacts}\n\n # 如果是分期不做一下操作\n if bill[\"category\"] != ApplicationType.CASH_LOAN_STAGING.value:\n # 获取已有new applicant号码\n old_app = OldLoanApplication.get_or_none(\n OldLoanApplication.application_id == application_id,\n OldLoanApplication.status.in_(OldLoanStatus.available())\n )\n if not old_app:\n old_app = OldLoanApplication.create(application_id=application_id,\n user_id=user_id,\n numbers=','.join(numbers))\n else:\n _numbers = old_app.numbers.split(',')\n # 去重并且删除空号码\n old_app.numbers = ','.join(set([nu for nu in (_numbers + numbers)\n if nu]))\n # 已入催件end_date + 7\n if old_app.status == OldLoanStatus.PROCESSING.value:\n old_app.end_date = old_app.end_date + timedelta(days=7)\n old_app.save()\n\n new_contact = set(numbers) - source_contact_set\n insert_args = [{'user_id': user_id,\n 'name': user_name,\n 'number': i,\n 'relationship': Relationship.APPLICANT.value,\n 'source': ApplicantSource.NEW_APPLICANT.value,\n 'real_relationship': Relationship.APPLICANT.value\n } for i in new_contact]\n if insert_args:\n Contact.insert_many(insert_args).execute()\n if bill[\"category\"] == ApplicationType.CASH_LOAN_STAGING.value:\n return\n start_old_application(old_app)\n\n\ndef run_one_sql(sql):\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n result = cursor.fetchone()[0] / 1000000\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n result = Decimal(0)\n return result\n\n\ndef run_member_sql(sql):\n result = [0, 0]\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n sql_result = cursor.fetchone()\n if sql_result:\n result = sql_result\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n return result\n\n\ndef run_all_sql(sql):\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(sql)\n result = cursor.fetchall()\n except Exception as e:\n logging.info('run sql error: %s' % str(sql))\n result = []\n return result\n\n\n# 得到dpd1-3的待催维度recover_rate(废弃)\ndef get_before_bomber(date_time):\n begin_time = str(date_time - timedelta(days=7))\n end_time = str(date_time)\n # 得到每周一已存在的件的待催金额\n old_sql = \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at<date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\" % (begin_time, begin_time)\n old_data = run_one_sql(old_sql)\n\n # 得到每天新达到dpd1的待催件的金额\n new_sql = \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at> '%s' \n and created_at<'%s'\n and overdue_days=1;\n \"\"\" % (begin_time, end_time)\n new_data = run_one_sql(new_sql)\n\n # 计算每天进入dpd4的金额\n dpd4_sql = \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as 
amount\n from \n bill_java.overdue bb\n where \n created_at>date_add('%s',interval 1 day) \n and created_at< date_add('%s',interval 1 day)\n and overdue_days=4;\n \"\"\" % (begin_time, end_time)\n dpd4_data = run_one_sql(dpd4_sql)\n\n # 周一时的dpd2\\3待还\n dpd2_sql = \"\"\"\n select \n sum(principal_pending+late_fee_pending+interest_pending) as amount\n from \n bill_java.overdue bb\n where \n created_at>'%s' \n and created_at< date_add('%s',interval 1 day)\n and overdue_days in (2,3)\n \"\"\" % (end_time, end_time)\n dpd2_data = run_one_sql(dpd2_sql)\n\n all_money = old_data + new_data\n repayment = all_money - dpd4_data - dpd2_data\n pro = 0\n if all_money:\n pro = (repayment / all_money) * 100\n RepaymentReport.create(\n time=begin_time,\n cycle=0,\n all_money=all_money,\n proportion=pro,\n repayment=repayment\n )\n\n\n# 每周刷新一次recover_rate报表数据(待催维度)\n@action(MessageAction.RECOVER_RATE_WEEK_MONEY)\ndef recover_rate_week_money(payload, msg_id):\n #获取当天RECOVER_RATE_WEEK_MONEY日志次数\n worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))\n .where(WorkerLog.created_at >= date.today(),\n WorkerLog.action == 'RECOVER_RATE_WEEK_MONEY')\n .first())\n\n if worker_log.logs >= 5:\n return\n logging.info('start cal recover_rate_week_money')\n date_time = date.today()\n get_every_cycle_report(date_time)\n\n\n# 得到入催維度的dpd1-3的recover_rate\ndef get_before_bomber_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n for i in range(2, 5):\n money_sql = \"\"\"\n select \n sum(bo1.principal_pending+bo1.late_fee_pending+\n bo1.interest_pending) as dpd1_pending, \n sum(bo2.principal_pending+bo2.late_fee_pending+\n bo2.interest_pending) as dpd4_pending\n from bill_java.overdue bo1\n left join dashboard.application da \n on bo1.application_id=da.id \n left join bill_java.overdue bo2 \n on bo1.application_id=bo2.application_id \n and bo2.overdue_days=%s and bo2.status = 1\n where bo1.overdue_days=1 \n and bo1.status = 1\n and bo1.which_day_overdue>='%s' \n and bo1.which_day_overdue<'%s'\n and da.is_first_loan = %s\n and bo1.stage_num is null\n \"\"\" % (i, begin_date, end_date, is_first_loan)\n try:\n cursor = readonly_db.get_cursor()\n cursor.execute(money_sql)\n money = cursor.fetchone()\n all_money = money[0] / 1000000\n dpd4_money = money[1] / 1000000\n except Exception as e:\n logging.info('get all_money error: %s' % str(e))\n all_money = 0\n dpd4_money = 0\n\n repayment = all_money - dpd4_money\n if begin_date == date_time - timedelta(days=1):\n RepaymentReportInto.create(\n time=begin_date,\n cycle=0,\n all_money=round(all_money, 3),\n proportion='0',\n repayment=round(repayment, 3),\n is_first_loan=is_first_loan,\n contain_out=ContainOut.CONTAIN.value\n )\n else:\n pro = '0'\n if all_money:\n pro = (repayment / all_money) * 100\n pro = str(round(pro, 2))\n RepaymentReportInto.update(\n repayment=round(repayment, 3),\n proportion=pro\n ).where(\n RepaymentReportInto.time == begin_date,\n RepaymentReportInto.cycle == 0,\n RepaymentReportInto.is_first_loan == is_first_loan\n ).execute()\n\n end_date = begin_date\n begin_date = begin_date - timedelta(days=1)\n\n\n# 得到c1a入催维度的recover_rate\ndef get_c1a_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n 
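# --- Editorial sketch (illustrative only) ------------------------------------
# recover_rate_week_money above, like the other @action jobs in this module,
# opens with the same idempotency guard: count today's WorkerLog rows for the
# action and bail out once five runs are recorded. A shared helper capturing
# that pattern might look like this (the helper name is ours; it reuses the
# module's existing WorkerLog / fn / date imports and adds the None check the
# later jobs already perform):


def _retry_budget_exhausted(action_name, limit=5):
    """True when `action_name` has already run `limit` times today."""
    worker_log = (WorkerLog
                  .select(fn.COUNT(WorkerLog.action).alias('logs'))
                  .where(WorkerLog.created_at >= date.today(),
                         WorkerLog.action == action_name)
                  .first())
    return bool(worker_log and worker_log.logs >= limit)

# usage: if _retry_budget_exhausted('RECOVER_RATE_WEEK_MONEY'): return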
o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o \n on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\" % (begin_date, end_date, is_first_loan)\n all_money = run_one_sql(all_money_sql)\n\n begin_date = date_time - timedelta(days=19)\n repayment_sql = \"\"\"\n select \n sum(b.principal_part+b.late_fee_part) as paid_amount, \n cdt\n from \n (select \n br.principal_part, br.late_fee_part, \n date(cdt) as cdt, br.repay_at, br.application_id\n from (\n select ba.id, ba.C1A_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.C1A_entry >= '%s'\n and ba.C1A_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)\n group by 4, 5) b\n group by 2\n \"\"\" % (begin_date, end_date, is_first_loan)\n repayment = run_all_sql(repayment_sql)\n\n if not repayment:\n return\n RepaymentReportInto.create(\n time=end_date - timedelta(days=1),\n cycle=Cycle.C1A.value,\n all_money=round(all_money, 3),\n proportion='0',\n repayment=0,\n is_first_loan=is_first_loan,\n contain_out=ContainOut.CONTAIN.value\n )\n\n for d in repayment:\n repay = d[0] / 1000000\n report = RepaymentReportInto.filter(\n RepaymentReportInto.time == d[1],\n RepaymentReportInto.cycle == Cycle.C1A.value,\n RepaymentReportInto.is_first_loan == is_first_loan\n ).first()\n if report:\n report.repayment = round(repay, 3)\n pro = (repay / report.all_money) * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n# 得到c1b入催维度的recover_rate\ndef get_c1b_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,c1b_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c1b_entry >= '%s'\n and ba.c1b_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n inner join bill_java.overdue o on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\" % (begin_date, end_date, is_first_loan)\n all_money = run_one_sql(all_money_sql)\n\n not_contain_sql = \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id as application_id,c1b_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c1b_entry >= '%s'\n and ba.c1b_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id and bd.partner_id=5)\n ) a\n inner join bill_java.overdue o on a.application_id=o.application_id \n and date(a.cdt)=date(o.created_at) \n \"\"\" % (begin_date, end_date, is_first_loan)\n not_contain_money = run_one_sql(not_contain_sql)\n\n begin_date = date_time - timedelta(days=22)\n repayment_sql = \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount,et\n from \n (select br.principal_part, 
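# --- Editorial sketch (illustrative only) ------------------------------------
# get_c1a_into_rate above stores `proportion` as a two-decimal string such as
# '37.5'. That convention recurs across these report writers; factored out as
# a hypothetical helper (the name is ours):


def _proportion_str(repay_money, all_money):
    """Percentage of repay_money over all_money, '0' when undefined."""
    if not all_money:
        return '0'
    return str(round(repay_money / all_money * 100, 2))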
br.late_fee_part,\n date(a.c1b_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c1b_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c1b_entry >= '%s'\n and ba.c1b_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 2\n group by 4, 5) b\n group by 2;\n \"\"\" % (begin_date, end_date, is_first_loan)\n repayment = run_all_sql(repayment_sql)\n\n not_contain_repay_sql = \"\"\"\n select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et\n from\n (select br.principal_part,br.late_fee_part,\n date(a.c1b_entry) as et, br.application_id, br.repay_at\n from (\n select ba.id, ba.c1b_entry\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id\n where ba.c1b_entry >= '%s'\n and ba.c1b_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n and not exists(select 1 from bomber.dispatch_app_history bd \n where bd.application_id=ba.id \n and bd.partner_id=5)\n ) a\n left join bomber.repayment_log br on br.application_id = a.id \n and br.cycle = 2\n group by 4, 5) b\n group by 2\n \"\"\" % (begin_date, end_date, is_first_loan)\n not_contain_repay = run_all_sql(not_contain_repay_sql)\n\n if not not_contain_repay and not repayment:\n return\n for i in ContainOut.values():\n if i == ContainOut.NOT_CONTAIN.value:\n RepaymentReportInto.create(\n time=end_date - timedelta(days=1),\n cycle=Cycle.C1B.value,\n all_money=round(not_contain_money, 3),\n proportion='0',\n repayment=0,\n is_first_loan=is_first_loan,\n contain_out=ContainOut.NOT_CONTAIN.value\n )\n for repay in not_contain_repay:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n\n report = RepaymentReportInto.filter(\n RepaymentReportInto.time == repay[1],\n RepaymentReportInto.is_first_loan == is_first_loan,\n RepaymentReportInto.contain_out == i,\n RepaymentReportInto.cycle == Cycle.C1B.value\n ).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = (repay_money / report.all_money) * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n elif i == ContainOut.CONTAIN.value:\n RepaymentReportInto.create(\n time=end_date - timedelta(days=1),\n cycle=Cycle.C1B.value,\n all_money=round(all_money, 3),\n proportion='0',\n repayment=0,\n is_first_loan=is_first_loan,\n contain_out=ContainOut.CONTAIN.value\n )\n for repay in repayment:\n repay_money = 0\n if repay[0]:\n repay_money = repay[0] / 1000000\n\n report = RepaymentReportInto.filter(\n RepaymentReportInto.time == repay[1],\n RepaymentReportInto.is_first_loan == is_first_loan,\n RepaymentReportInto.contain_out == i,\n RepaymentReportInto.cycle == Cycle.C1B.value\n ).first()\n if report and report.all_money:\n report.repayment = round(repay_money, 3)\n pro = (repay_money / report.all_money) * 100\n pro = str(round(pro, 2))\n report.proportion = pro\n report.save()\n\n\n# 得到c2入催维度的recover_rate\ndef get_c2_into_rate(date_time):\n begin_time = date_time - timedelta(days=1)\n end_time = date_time\n for is_first_loan in FIRSTLOAN.values():\n begin_date = begin_time\n end_date = end_time\n all_money_sql = \"\"\"\n select sum(o.principal_pending+o.late_fee_pending+\n o.interest_pending) as pending_amount \n from (\n select ba.id,c2_entry as cdt\n from bomber.application ba\n left join dashboard.application da on ba.id=da.id \n where ba.c2_entry >= '%s'\n and ba.c2_entry < '%s'\n and ba.type = 0\n and da.is_first_loan = %s\n ) a\n 
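# --- Editorial sketch (illustrative only) ------------------------------------
# The contain / not-contain query pairs in these cycle functions differ only
# by one NOT EXISTS predicate that strips applications ever dispatched to an
# outsourcing partner (partner 5 for C1B above, partner 1 for C2 below).
# A hypothetical fragment builder makes the shared shape explicit:


def _partner_exclusion(partner_id):
    """SQL fragment excluding apps ever dispatched to `partner_id`."""
    return ("and not exists(select 1 from bomber.dispatch_app_history bd "
            "where bd.application_id=ba.id and bd.partner_id=%d)" % partner_id)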
inner join bill_java.overdue o 
            on a.id=o.application_id 
            and date(a.cdt)=date(o.created_at) 
        """ % (begin_date, end_date, is_first_loan)
        all_money = run_one_sql(all_money_sql)

        not_contain_sql = """
            select sum(o.principal_pending+o.late_fee_pending+
                o.interest_pending) as pending_amount 
            from (
                select ba.id,c2_entry as cdt
                from bomber.application ba
                left join dashboard.application da on ba.id=da.id 
                where ba.c2_entry >= '%s'
                and ba.c2_entry < '%s'
                and ba.type = 0
                and da.is_first_loan = %s
                and not exists(select 1 from bomber.dispatch_app_history bd 
                                where bd.application_id=ba.id 
                                and bd.partner_id=1)
            ) a
            inner join bill_java.overdue o on a.id=o.application_id 
            and date(a.cdt)=date(o.created_at) 
        """ % (begin_date, end_date, is_first_loan)
        not_contain_money = run_one_sql(not_contain_sql)

        begin_date = date_time - timedelta(days=37)
        repayment_sql = """
            select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
            from
            (select br.principal_part,br.late_fee_part,
                date(a.c2_entry) as et, br.application_id, br.repay_at
            from (
                select ba.id, ba.c2_entry
                from bomber.application ba
                left join dashboard.application da on ba.id=da.id
                where ba.c2_entry >= '%s'
                and ba.c2_entry < '%s'
                and ba.type = 0
                and da.is_first_loan = %s
            ) a
            left join bomber.repayment_log br on br.application_id = a.id 
            and br.cycle = 3
            group by 4, 5) b
            group by 2
        """ % (begin_date, end_date, is_first_loan)
        repayment = run_all_sql(repayment_sql)

        not_contain_repay_sql = """
            select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
            from
            (select br.principal_part,br.late_fee_part,
                date(a.c2_entry) as et, br.application_id, br.repay_at
            from (
                select ba.id, ba.c2_entry
                from bomber.application ba
                left join dashboard.application da on ba.id=da.id
                where ba.c2_entry >= '%s'
                and ba.c2_entry < '%s'
                and ba.type = 0
                and da.is_first_loan = %s
                and not exists(select 1 from bomber.dispatch_app_history bd 
                                where bd.application_id=ba.id 
                                and bd.partner_id=1)
            ) a
            left join bomber.repayment_log br on br.application_id = a.id 
            and br.cycle = 3
            group by 4, 5) b
            group by 2
        """ % (begin_date, end_date, is_first_loan)
        not_contain_repay = run_all_sql(not_contain_repay_sql)

        # Skip reporting only when both result sets are empty, mirroring the
        # guard used in get_c1b_into_rate
        if not not_contain_repay and not repayment:
            return
        for i in ContainOut.values():
            if i == ContainOut.NOT_CONTAIN.value:
                RepaymentReportInto.create(
                    time=end_date - timedelta(days=1),
                    cycle=Cycle.C2.value,
                    all_money=round(not_contain_money, 3),
                    proportion='0',
                    repayment=0,
                    is_first_loan=is_first_loan,
                    contain_out=ContainOut.NOT_CONTAIN.value
                )
                for repay in not_contain_repay:
                    repay_money = Decimal(0)
                    if repay[0]:
                        repay_money = repay[0]
                    repay_money = repay_money / 1000000
                    report = RepaymentReportInto.filter(
                        RepaymentReportInto.time == repay[1],
                        RepaymentReportInto.is_first_loan == is_first_loan,
                        RepaymentReportInto.contain_out == i,
                        RepaymentReportInto.cycle == Cycle.C2.value
                    ).first()
                    if report and report.all_money:
                        report.repayment = round(repay_money, 3)
                        pro = (repay_money / report.all_money) * 100
                        pro = str(round(pro, 2))
                        report.proportion = pro
                        report.save()
            elif i == ContainOut.CONTAIN.value:
                RepaymentReportInto.create(
                    time=end_date - timedelta(days=1),
                    cycle=Cycle.C2.value,
                    all_money=round(all_money, 3),
                    proportion='0',
                    repayment=0,
                    is_first_loan=is_first_loan,
                    contain_out=ContainOut.CONTAIN.value
                )
                for repay in repayment:
                    repay_money = 0
                    if repay[0]:
                        repay_money = repay[0] / 1000000
                    report = RepaymentReportInto.filter(
                        RepaymentReportInto.time == repay[1],
                        RepaymentReportInto.is_first_loan == is_first_loan,
                        RepaymentReportInto.contain_out == i,
                        RepaymentReportInto.cycle == Cycle.C2.value
                    ).first()
                    if report and report.all_money:
                        report.repayment = round(repay_money, 3)
                        pro = (repay_money / report.all_money) * 100
                        pro = str(round(pro, 2))
                        report.proportion = pro
                        report.save()


# Recover rate for cycle C3, collection-entry dimension
def get_c3_into_rate(date_time):
    begin_time = date_time - timedelta(days=1)
    end_time = date_time

    for is_first_loan in FIRSTLOAN.values():
        begin_date = begin_time
        end_date = end_time
        all_money_sql = """
            select sum(o.principal_pending+o.late_fee_pending+
                o.interest_pending) as pending_amount 
            from (
                select ba.id, ba.c3_entry as cdt
                from bomber.application ba
                left join dashboard.application da on ba.id=da.id
                where ba.c3_entry >= '%s'
                and ba.c3_entry < '%s'
                and ba.type = 0
                and da.is_first_loan = %s
            ) a
            inner join bill_java.overdue o on a.id=o.application_id 
            and date(a.cdt)=date(o.created_at) 
        """ % (begin_date, end_date, is_first_loan)
        all_money = run_one_sql(all_money_sql)

        begin_date = date_time - timedelta(days=30)
        repayment_sql = """
            select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
            from
            (select br.principal_part,br.late_fee_part,
                date(a.c3_entry) as et, br.application_id, br.repay_at
            from (
                select ba.id, ba.c3_entry
                from bomber.application ba
                left join dashboard.application da on ba.id=da.id
                where ba.c3_entry >= '%s'
                and ba.c3_entry < '%s'
                and ba.type = 0
                and da.is_first_loan = '%s'
            ) a
            left join bomber.repayment_log br on br.application_id = a.id 
            and br.cycle = 4
            group by 4, 5) b
            group by 2
        """ % (begin_date, end_date, is_first_loan)
        repayment = run_all_sql(repayment_sql)

        RepaymentReportInto.create(
            time=end_date - timedelta(days=1),
            cycle=Cycle.C3.value,
            all_money=round(all_money, 3),
            proportion='0',
            repayment=0,
            is_first_loan=is_first_loan,
            contain_out=ContainOut.CONTAIN.value
        )
        if not repayment:
            return
        for repay in repayment:
            repay_money = Decimal(0)
            if repay[0]:
                repay_money = repay[0]
            repay_money = repay_money / 1000000
            report = RepaymentReportInto.filter(
                RepaymentReportInto.time == repay[1],
                RepaymentReportInto.cycle == Cycle.C3.value,
                RepaymentReportInto.is_first_loan == is_first_loan
            ).first()
            if report:
                report.repayment = repay_money
                pro = 0
                if report.all_money and int(report.all_money):
                    pro = (repay_money / report.all_money) * 100
                    pro = str(round(pro, 2))
                report.proportion = pro
                report.save()


# Refresh the recover-rate report data once a day (collection-entry dimension)
@action(MessageAction.RECOVER_RATE_WEEK_MONEY_INTO)
def recover_rate_week_money_into(payload, msg_id):
    worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
                  .where(WorkerLog.created_at >= date.today(),
                         WorkerLog.action == 'RECOVER_RATE_WEEK_MONEY_INTO')
                  .first())

    if worker_log and worker_log.logs >= 5:
        return
    date_time = date.today()
    get_before_bomber_rate(date_time)
    get_c1a_into_rate(date_time)
    get_c1b_into_rate(date_time)
    get_c2_into_rate(date_time)
    get_c3_into_rate(date_time)

    # Flip rows that have passed their maturity window from unripe to ripe
    ripe_days = {0: 3, 1: 7, 2: 20, 3: 30, 4: 30}
    for i in range(0, 5):
        repe_date = date.today() - timedelta(days=ripe_days[i])
        (RepaymentReportInto
         .update(ripe_ind=RipeInd.RIPE.value)
         .where(RepaymentReportInto.time < repe_date,
RepaymentReportInto.cycle == i)\n ).execute()\n\n\n# ----------------- 计算summary_bomber中原summary存在的指标 --------------------\n# 得到基础数据\ndef get_static_bomber(begin_date):\n active_date = begin_date - timedelta(days=8)\n bombers = (BomberR\n .select(BomberR.id,\n BomberR.role.alias('role'),\n BomberR.last_active_at.alias('active'))\n .where(BomberR.last_active_at > active_date,\n BomberR.role << [1, 2, 4, 5, 6, 8,9]))\n summary = []\n for bomber in bombers:\n summary.append({\n 'time': begin_date,\n 'bomber_id': bomber.id,\n 'cycle': bomber.role.cycle,\n 'work_ind': 0\n })\n SummaryBomber.insert_many(summary).execute()\n\n\n# 部分指标须在当天晚上计算完成\n@action(MessageAction.SUMMARY_CREATE)\ndef summary_create(payload, msg_id):\n begin_date = date.today()\n worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))\n .where(WorkerLog.created_at >= begin_date,\n WorkerLog.action == 'SUMMARY_CREATE')\n .first())\n\n if worker_log and worker_log.logs >= 5:\n return\n\n get_static_bomber(begin_date)\n\n\n# 得到当天工作的员工\ndef get_active_bomber(begin_date):\n bombers = (BomberR\n .select(BomberR.id)\n .where(BomberR.last_active_at >= begin_date))\n for bomber in bombers:\n (SummaryBomber.update(work_ind=1)\n .where(SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == bomber.id)\n ).execute()\n\n\n# 得到每个催收员每天拨打电话数和拨打件数\n@time_logger\ndef get_call_and_made(end_date, begin_date, real_time_query=False):\n call_sql = \"\"\"\n select \n bomber_id, \n count(case when relationship is not null then application_id end) \n as 'call_cnt', \n count(distinct case when relationship is not null then \n application_id end) as 'call_case',\n count(case when phone_status=4 then application_id end) as 'connect',\n count(distinct case when phone_status=4 then application_id end) \n as 'connect_case'\n from (\n select bomber_id,application_id,phone_status, cycle, relationship\n from bomber.call_actions ba\n where created_at>'%s' and created_at<'%s'\n and type in (0, 1)\n ) a\n group by 1\n \"\"\" % (begin_date, end_date)\n calls = run_all_sql(call_sql)\n if real_time_query:\n return calls\n for call in calls:\n bomber, call_cnt, case_made, connect_cnt, case_connect = call\n (SummaryBomber.update(\n case_made_cnt=case_made,\n call_cnt=call_cnt,\n call_connect_cnt=connect_cnt,\n case_connect_cnt=case_connect)\n .where(\n SummaryBomber.bomber_id == bomber,\n SummaryBomber.time == begin_date)\n ).execute()\n return calls\n\n\n# 得到每个催收员每天待催件数\n@time_logger\ndef get_claimed_cnt(end_date, begin_date, real_time_query=False):\n table_date = begin_date - timedelta(days=30)\n claimed_sql = \"\"\"\n SELECT\n COUNT( `t1`.`application_id` ) AS cnt,\n `t1`.`bomber_id` AS bomber_id \n FROM\n `dispatch_app_history` AS t1 \n WHERE\n ( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null ) \n AND ( `t1`.`bomber_id` != 1000 ) \n AND ( `t1`.`partner_id` IS null ) \n AND ( `t1`.`entry_at` > '%s' ) \n AND ( `t1`.`entry_at` < '%s' ) \n GROUP BY\n `t1`.`bomber_id`\n \"\"\" % (begin_date, table_date, end_date)\n claimeds = run_all_sql(claimed_sql)\n if real_time_query:\n return claimeds\n for claimed in claimeds:\n cnt, bomber_id = claimed\n (SummaryBomber.update(claimed_cnt=cnt)\n .where(SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == bomber_id)\n ).execute()\n return claimeds\n\n\n# 得到短信相关数据\ndef get_sms_data(end_data, begin_data):\n all_sms = (ConnectHistoryR\n .select(ConnectHistoryR.operator.alias('bomber_id'),\n fn.COUNT(ConnectHistoryR.application).alias('sms_send'))\n .where(ConnectHistoryR.created_at > begin_data,\n 
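# --- Editorial sketch (illustrative only) ------------------------------------
# Several collectors below (get_call_and_made, get_claimed_cnt, ...) are
# wrapped in @time_logger, which is defined elsewhere in this codebase. The
# sketch below is only a plausible shape for such a timing wrapper, not the
# project's actual implementation:
import functools
import time


def _time_logger_sketch(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start = time.time()
        try:
            return func(*args, **kwargs)
        finally:
            logging.info('%s took %.3fs', func.__name__, time.time() - start)
    return wrapper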
ConnectHistoryR.created_at < end_data,\n ConnectHistoryR.type.in_(ConnectType.sms()))\n .group_by(ConnectHistoryR.operator))\n\n for sms in all_sms:\n (SummaryBomber.update(sms_cnt=sms.sms_send)\n .where(SummaryBomber.time == begin_data,\n SummaryBomber.bomber_id == sms.bomber_id)\n ).execute()\n return all_sms\n\n\n# 得到ptp相关的数据\n@time_logger\ndef get_ptp_data(end_date, begin_date, real_query_time=False):\n sql = \"\"\"\n SELECT\n a.bomber_id,\n sum( a.promised_amount ) AS ptp_amount,\n count( application_id ) \n FROM\n bomber.auto_call_actions a\n LEFT JOIN bomber.bomber c ON a.bomber_id = c.id \n WHERE\n a.created_at >= '%s' \n AND a.created_at < '%s'\n AND a.promised_date != '' \n GROUP BY 1 \n UNION\n SELECT\n a.bomber_id,\n ifnull( sum( a.promised_amount ), 0 ) AS ptp_amount,\n count( application_id ) \n FROM\n bomber.bombing_history a\n LEFT JOIN bomber.bomber c ON a.bomber_id = c.id \n WHERE\n bomber_id NOT BETWEEN 151 \n AND 177 \n AND bomber_id NOT BETWEEN 181 \n AND 183 \n AND bomber_id != 72 \n AND a.created_at >= '%s' \n AND a.created_at < '%s' \n AND a.promised_date != '' \n GROUP BY 1\n \"\"\" % (begin_date, end_date, begin_date, end_date)\n ptp_datas = run_all_sql(sql)\n if real_query_time:\n return ptp_datas\n\n result = {}\n for ptp in ptp_datas:\n bomber_id, amount, cnt = ptp\n if bomber_id in result.keys():\n result[bomber_id][0] += amount\n result[bomber_id][1] += cnt\n continue\n result[bomber_id] = [amount, cnt]\n for key, value in result.items():\n (SummaryBomber\n .update(\n promised_cnt=value[1],\n promised_amount=value[0]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == key\n )).execute()\n return ptp_datas\n\n\n# 统计回款金额和回款件数\n@time_logger\ndef get_recover_amount(end_date, begin_date, real_time_query=False):\n C1_sql = \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,\n count(distinct application_id)\n from \n (select a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,4)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\" % (begin_date, end_date)\n C1_results = run_all_sql(C1_sql)\n if not real_time_query:\n for C1_result in C1_results:\n bomber_id, amount, cnt = C1_result\n (SummaryBomber.update(\n cleared_cnt=cnt,\n cleared_amount=amount\n ).where(\n SummaryBomber.bomber_id == bomber_id,\n SummaryBomber.time == begin_date\n )).execute()\n\n other_sql = \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,\n count(distinct application_id)\n from (\n select application_id,current_bomber_id,pay_amount,repay_at\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (2,3,5,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n group by 1,4) b\n group by 1\n \"\"\" % (begin_date, end_date)\n sql_results = 
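# --- Editorial sketch (illustrative only) ------------------------------------
# get_ptp_data above merges its two UNION branches into a
# {bomber_id: [amount, cnt]} dict by hand. An equivalent fold with
# defaultdict, shown for reference (helper name is ours):
from collections import defaultdict


def _merge_ptp_rows(rows):
    """rows of (bomber_id, amount, cnt) -> {bomber_id: [amount, cnt]}."""
    merged = defaultdict(lambda: [0, 0])
    for bomber_id, amount, cnt in rows:
        merged[bomber_id][0] += amount
        merged[bomber_id][1] += cnt
    return merged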
run_all_sql(other_sql)\n if not real_time_query:\n for sql_result in sql_results:\n bomber_id, amount, cnt = sql_result\n (SummaryBomber.update(\n cleared_cnt=cnt,\n cleared_amount=amount\n ).where(\n SummaryBomber.bomber_id == bomber_id,\n SummaryBomber.time == begin_date\n )).execute()\n result = sql_results + C1_results\n return result\n\n\n# summary 报表新数据(分布计算,先计算一部分数据)\n@action(MessageAction.SUMMARY_NEW)\ndef summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))\n .where(WorkerLog.created_at >= end_date,\n WorkerLog.action == 'SUMMARY_NEW')\n .first())\n\n if worker_log and worker_log.logs >= 5:\n return\n\n get_active_bomber(begin_date)\n get_call_and_made(end_date, begin_date)\n get_claimed_cnt(end_date, begin_date)\n get_sms_data(end_date, begin_date)\n get_ptp_data(end_date, begin_date)\n get_recover_amount(end_date, begin_date)\n get_unfollowed(begin_date)\n get_unfollowed_call(begin_date)\n\n\n# ------------------------ 计算summary bomber的另部分指标 ----------------------\n# 得到新件件数和金额\ndef get_new_case_amount(begin_date, end_date):\n all_case = (DispatchAppHistoryR\n .select(fn.SUM(DispatchAppHistoryR.entry_late_fee_pending +\n DispatchAppHistoryR.entry_principal_pending)\n .alias('pending'),\n DispatchAppHistoryR.bomber_id,\n fn.COUNT(DispatchAppHistoryR.application).alias('cnt'))\n .where(DispatchAppHistoryR.entry_at > begin_date,\n DispatchAppHistoryR.entry_at < end_date,\n DispatchAppHistoryR.partner_id.is_null(True))\n .group_by(DispatchAppHistoryR.bomber_id))\n for case in all_case:\n SummaryBomber.update(\n new_case_amount_sum=case.pending,\n new_case_cnt=case.cnt\n ).where(\n SummaryBomber.bomber_id == case.bomber_id,\n SummaryBomber.time == begin_date\n ).execute()\n return all_case\n\n\n# 得到KP相关数据\ndef get_kp_cleared(begin_date, end_date):\n auto_call_sql = \"\"\"\n SELECT\n a.current_bomber_id, count( b.application_id ) \n FROM\n (SELECT\n current_bomber_id, principal_part, late_fee_part,\n repay_at, application_id \n FROM\n bomber.repayment_log \n WHERE\n repay_at >= '%s' \n AND repay_at < '%s' \n GROUP BY 4, 5 ) a\n LEFT JOIN (\n SELECT\n cycle, bomber_id, promised_amount, promised_date,\n application_id, created_at \n FROM\n bomber.auto_call_actions \n WHERE\n created_at >= date_sub( '%s', INTERVAL 7 DAY ) \n AND created_at < '%s' \n AND promised_date IS NOT NULL \n ) b ON a.current_bomber_id = b.bomber_id \n AND a.application_id = b.application_id \n AND date( a.repay_at ) <= date( b.promised_date ) \n AND date( a.repay_at ) >= date( b.created_at )\n LEFT JOIN bomber.bomber c ON a.current_bomber_id = c.id \n WHERE\n b.promised_date >= '%s'\n GROUP BY 1\n \"\"\" % (begin_date, end_date, begin_date, end_date, begin_date)\n auto_call_results = run_all_sql(auto_call_sql)\n\n manual_sql = \"\"\"\n SELECT\n a.current_bomber_id, count( b.application_id ) \n FROM\n (SELECT\n current_bomber_id, principal_part, late_fee_part,\n repay_at, application_id, created_at \n FROM\n bomber.repayment_log \n WHERE\n repay_at >= '%s' \n AND repay_at < '%s' \n AND principal_part + late_fee_part > 0 \n GROUP BY 2, 5 ) a\n LEFT JOIN (\n SELECT\n cycle, bomber_id, promised_amount, promised_date, \n application_id, created_at\t\n FROM\n bomber.bombing_history \n WHERE\n created_at >= date_sub( '%s', INTERVAL 7 DAY ) \n AND created_at < '%s' \n AND promised_date IS NOT NULL \n ) b ON a.current_bomber_id = b.bomber_id \n AND a.application_id = b.application_id \n AND date( 
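# --- Editorial sketch (illustrative only) ------------------------------------
# The KP joins in get_kp_cleared count a repayment as a kept promise when it
# lands, by calendar day, between the day the promise was made and the
# promised date (promises are looked up over a 7-day window). The same check
# as a pure predicate, assuming datetime/date inputs (helper name is ours):


def _is_kept_promise(repay_at, promise_created_at, promised_date):
    """True when the repayment falls inside the promise window, by day."""
    return (promise_created_at.date() <= repay_at.date()
            <= promised_date.date())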
a.repay_at ) <= date( b.promised_date ) \n AND date( a.repay_at ) >= date( b.created_at )\n LEFT JOIN bomber.bomber c ON a.current_bomber_id = c.id \n WHERE\n b.promised_date >= '%s'\n GROUP BY 1\n \"\"\" % (begin_date, end_date, begin_date, end_date, begin_date)\n manual_results = run_all_sql(manual_sql)\n\n sql_result = auto_call_results + manual_results\n result = {}\n for data in sql_result:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n for key, value in result.items():\n (SummaryBomber\n .update(\n KP_cleared_cnt=value\n ).where(\n SummaryBomber.bomber_id == key,\n SummaryBomber.time == begin_date)\n ).execute()\n\n\n# 得到当天处于ptp的件(KP率的分母)\ndef get_kp_today(begin_date, end_date):\n sql = \"\"\"\n select bomber_id, count(distinct application_id)\n from( \n SELECT bomber_id, application_id\n FROM bomber.auto_call_actions a\n WHERE promised_date >= '%s' AND created_at < '%s' \n AND EXISTS(select 1 from bomber.application ba \n where a.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))\n UNION \n SELECT bomber_id, application_id\n FROM bomber.bombing_history b\n WHERE promised_date >= '%s' AND created_at < '%s'\n AND EXISTS(select 1 from bomber.application ba \n where b.application_id=ba.id \n and (ba.finished_at is null \n or ba.finished_at > '%s'))) result\n GROUP BY 1\n \"\"\" % (begin_date, end_date, begin_date, begin_date, end_date, begin_date)\n kp_today = run_all_sql(sql)\n\n for kp in kp_today:\n (SummaryBomber.update(\n KP_today_cnt=kp[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == kp[0]\n )).execute()\n\n\n# 得到ptp相关信息(当日ptp到期件数、次日到期件数)\ndef get_ptp_cnt(begin_date, end_date):\n today_due = []\n for sql_date in (begin_date, end_date):\n sql = \"\"\"\n select bomber_id,count(distinct application_id) as cnt from \n ( # 自动外呼中排除掉已经修改P期的件\n select application_id,bomber_id,created_at \n from bomber.auto_call_actions ba \n where promised_date ='%s' # 需要过滤掉在手动中续P的\n and not exists ( select 1 from bomber.bombing_history bb \n where bb.application_id = ba.application_id \n and bb.bomber_id = ba.bomber_id \n and bb.created_at>ba.created_at\n and bb.promised_date is not null \n and bb.created_at < '%s')\n union #历史记录,排除因为续P,导致这个件不在当日的P中\n select b.application_id,b.bomber_id,a.cdt\n from bomber.bombing_history b\n inner join (\n select application_id,bomber_id,max(created_at) as cdt \n from bomber.bombing_history bb\n where bb.created_at>date_sub('%s',interval 7 day)\n and bb.created_at<'%s'\n and promised_date is not null\n group by 1,2) a \n on b.application_id=a.application_id \n and b.bomber_id=a.bomber_id and a.cdt=b.created_at\n where b.promised_date ='%s'\n union #当天下的当天的P\n select b.application_id,b.bomber_id,b.created_at\n from bomber.bombing_history b\n where b.promised_date ='%s'\n and b.created_at>'%s'\n and b.created_at<date_add('%s',interval 1 day)\n ) a\n where exists(select 1 from bomber.application ba \n where ba.id=a.application_id \n and ((ba.finished_at is null) \n or (ba.finished_at > '%s')))\n group by 1\n \"\"\" % (sql_date, begin_date, begin_date, begin_date, sql_date,\n sql_date, begin_date, begin_date, begin_date)\n datas = run_all_sql(sql)\n\n if sql_date == begin_date:\n today_due = datas\n for data in datas:\n (SummaryBomber.update(\n ptp_today_cnt=data[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == data[0]\n )).execute()\n continue\n nextday_due = datas\n for data in datas:\n (SummaryBomber.update(\n 
ptp_next_cnt=data[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == data[0]\n )).execute()\n return [today_due, nextday_due]\n\n\n# 得到ptp维护的相关信息\ndef get_ptp_call_cnt(begin_date, end_date):\n today_followed = []\n for sql_data in (begin_date, end_date):\n sql = \"\"\"\n select b.bomber_id,count(distinct b.application_id) as cnt \n from (\n select a.* from \n (\n select application_id,bomber_id,created_at \n from bomber.auto_call_actions ba \n where promised_date ='%s' # 需要过滤掉在手动中续P的\n and not exists (select 1 from bomber.bombing_history bb \n where bb.application_id = ba.application_id \n and bb.bomber_id = ba.bomber_id \n and bb.created_at>ba.created_at \n and bb.promised_date is not null \n and bb.created_at < '%s')\n union #历史记录,排除因为续P,导致这个件不在当日的P中\n select b.application_id,b.bomber_id,a.cdt\n from bomber.bombing_history b\n inner join (\n select application_id,bomber_id,max(created_at) as cdt \n from bomber.bombing_history bb\n where bb.created_at>date_sub('%s',interval 7 day)\n and bb.created_at<'%s'\n and promised_date is not null\n group by 1,2) a \n on b.application_id=a.application_id \n and b.bomber_id=a.bomber_id and a.cdt=b.created_at\n where b.promised_date ='%s'\n union #当天下的当天的P\n select b.application_id,b.bomber_id,b.created_at\n from bomber.bombing_history b\n where b.promised_date ='%s'\n and b.created_at>'%s'\n and b.created_at<date_add('%s',interval 1 day)\n ) a\n where exists(select 1 from bomber.application ba \n where ba.id=a.application_id \n and ((ba.finished_at is null) \n or (ba.finished_at > '%s')))\n and exists(select 1 from bomber.call_actions bc \n where a.application_id = bc.application_id \n and a.bomber_id = bc.bomber_id \n and bc.created_at>'%s' \n and bc.created_at< date_add('%s',interval 1 day) \n and bc.created_at>=a.created_at)\n union \n select a.* from \n (\n select application_id,bomber_id,created_at \n from bomber.auto_call_actions ba \n where promised_date ='%s' # 需要过滤掉在手动中续P的\n and not exists ( select 1 from bomber.bombing_history bb \n where bb.application_id = ba.application_id \n and bb.bomber_id = ba.bomber_id \n and bb.created_at>ba.created_at \n and bb.promised_date is not null \n and bb.created_at < '%s')\n union #历史记录,排除因为续P,导致这个件不在当日的P中\n select b.application_id,b.bomber_id,a.cdt\n from bomber.bombing_history b\n inner join (\n select application_id,bomber_id,max(created_at) as cdt \n from bomber.bombing_history bb\n where bb.created_at>date_sub('%s',interval 7 day)\n and bb.created_at<'%s'\n and promised_date is not null\n group by 1,2) a \n on b.application_id=a.application_id \n and b.bomber_id=a.bomber_id and a.cdt=b.created_at\n where b.promised_date ='%s'\n union #当天下的当天的P\n select b.application_id,b.bomber_id,b.created_at\n from bomber.bombing_history b\n where b.promised_date ='%s'\n and b.created_at>'%s'\n and b.created_at<date_add('%s',interval 1 day)\n ) a\n where exists(select 1 from bomber.application ba \n where ba.id=a.application_id \n and ba.finished_at > '%s' \n and ba.finished_at< date_add('%s',interval 1 day))\n ) b\n group by 1\n \"\"\" % (sql_data, begin_date, begin_date, begin_date, sql_data,\n sql_data, begin_date, begin_date, begin_date, begin_date,\n begin_date, sql_data, begin_date, begin_date, begin_date,\n sql_data, sql_data, begin_date, begin_date, begin_date,\n begin_date)\n datas = run_all_sql(sql)\n\n if sql_data == begin_date:\n today_followed = datas\n for data in datas:\n (SummaryBomber.update(\n ptp_today_call_cnt=data[1]\n ).where(\n SummaryBomber.bomber_id == data[0],\n 
SummaryBomber.time == begin_date\n )).execute()\n continue\n nextday_followed = datas\n for data in datas:\n (SummaryBomber.update(\n ptp_next_call_cnt=data[1]\n ).where(\n SummaryBomber.bomber_id == data[0],\n SummaryBomber.time == begin_date\n )).execute()\n return [today_followed, nextday_followed]\n\n\n# 得到新件还款金额(只有c2、c3才有新件还款的概念)\ndef get_new_case_cleared(begin_date, end_date):\n sql = \"\"\"\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\tbomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c2_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c2_entry > '%s' \n AND ba.c2_entry < '%s' \n ) a \n GROUP BY 1 \n UNION\n SELECT\n ptp_bomber AS bomber_id,\n sum( paid_amount ) AS pending \n FROM\n (SELECT\n br.late_fee_part + br.principal_part AS paid_amount,\n br.ptp_bomber\n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c3_entry ) = date( br.repay_at ) \n AND br.ptp_bomber is not null\n WHERE ba.c3_entry > '%s' \n AND ba.c3_entry < '%s' \n ) a\n GROUP BY 1\n \"\"\" % (begin_date, end_date, begin_date, end_date,begin_date, end_date)\n case_cleared_sums = run_all_sql(sql)\n\n for clear in case_cleared_sums:\n (SummaryBomber.update(\n new_case_cleared_sum=clear[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == clear[0]\n )).execute()\n\n\n# 新件当日维护件数\n@time_logger\ndef get_new_case_call(begin_date, end_date, real_query_time=False):\n sql = \"\"\"\n SELECT\n bd.bomber_id,\n count( DISTINCT bd.application_id )\n FROM\n bomber.dispatch_app_history bd\n INNER JOIN bomber.call_actions bc \n ON bd.application_id = bc.application_id \n AND bd.bomber_id = bc.bomber_id \n AND date( bd.entry_at ) = date( bc.created_at ) \n WHERE\n entry_at > '%s' \n AND entry_at < '%s' \n AND partner_id IS NULL \n GROUP BY 1\n \"\"\" % (begin_date, end_date)\n new_case_calls = run_all_sql(sql)\n\n if real_query_time:\n return new_case_calls\n\n for call in new_case_calls:\n (SummaryBomber.update(\n new_case_call_cnt=call[1]\n ).where(\n SummaryBomber.bomber_id == call[0],\n SummaryBomber.time == begin_date\n )).execute()\n return new_case_calls\n\n\n# 得到接通件均通话时长\n@time_logger\ndef get_calltime_avg(begin_date, end_date, real_query_time=False):\n autos_sql = \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction,\n count( 1 ) AS auto_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' ' \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\" % (begin_date, end_date)\n autos = run_all_sql(autos_sql)\n\n manual_sql = \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND 
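# --- Editorial sketch (illustrative only) ------------------------------------
# get_new_case_cleared above repeats the same subquery three times, varying
# only the entry column (c1b_entry / c2_entry / c3_entry). A hypothetical
# builder for one UNION arm (names and formatting are ours, not the module's):


def _new_case_cleared_arm(entry_column, begin_date, end_date):
    """One UNION arm of the new-case-cleared query, per entry column."""
    return """
        SELECT ptp_bomber AS bomber_id, sum(paid_amount) AS pending
        FROM (SELECT br.late_fee_part + br.principal_part AS paid_amount,
                     br.ptp_bomber
              FROM bomber.application ba
              INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
                  AND date(ba.{col}) = date(br.repay_at)
                  AND br.ptp_bomber is not null
              WHERE ba.{col} > '{begin}' AND ba.{col} < '{end}') a
        GROUP BY 1
    """.format(col=entry_column, begin=begin_date, end=end_date)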
an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND an.STATUS = 'ANSWERED' \n AND bb.id IS NOT NULL \n AND an.recording is not null\n GROUP BY 1\n \"\"\" % (begin_date, end_date, '5%', '3%')\n manuals = run_all_sql(manual_sql)\n\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]][0] += data[1]\n result[data[0]][1] += data[2]\n continue\n result[data[0]] = [data[1], data[2]]\n\n if real_query_time:\n return result\n\n for key, value in result.items():\n (SummaryBomber.update(\n calltime_case_sum=value[0],\n calltime_case_cnt=value[1],\n calltime_case_avg=value[0] / value[1] if value[1] else 0\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == key\n )).execute()\n return result\n\n\n# 得到等待时长相关数据\ndef get_no_calltime_avg(begin_date, end_date):\n manual_sql = \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction,\n count( 1 ) AS manual_jt_cnt \n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND (an.status!='ANSWERED' or an.recording is null) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\" % (begin_date, end_date, '5%', '3%')\n manuals = run_all_sql(manual_sql)\n\n for data in manuals:\n (SummaryBomber.update(\n calltime_no_case_sum=data[1],\n calltime_no_case_cnt=data[2],\n calltime_no_case_avg=data[1] / data[2] if data[2] else 0\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == data[0]\n )).execute()\n\n\n# 得到通话总时长\n@time_logger\ndef get_calltime_sum(begin_date, end_date, real_query_time=False):\n autos_sql = \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS auto_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.username = bb.username \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND an.username != ' '\n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\" % (begin_date, end_date)\n autos = run_all_sql(autos_sql)\n\n manual_sql = \"\"\"\n SELECT\n bb.id AS bomber_id,\n sum( talkduraction ) AS manual_talkduraction\n FROM\n auto_call.newcdr an\n LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext \n WHERE\n an.timestart >= '%s' \n AND an.timestart < '%s' \n AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) ) \n AND bb.id IS NOT NULL \n GROUP BY 1\n \"\"\" % (begin_date, end_date, '5%', '3%')\n manuals = run_all_sql(manual_sql)\n\n datas = autos + manuals\n result = {}\n for data in datas:\n if data[0] in result.keys():\n result[data[0]] += data[1]\n continue\n result[data[0]] = data[1]\n if real_query_time:\n return result\n for key, value in result.items():\n (SummaryBomber.update(\n calltime_sum=value\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == key\n )).execute()\n return result\n\n\n# 当天未跟进的件\ndef get_unfollowed(begin_date):\n sql = \"\"\"\n SELECT\n bomber_id,\n count(1)\n FROM\n (\n SELECT\n bd.application_id,\n date(bd.entry_at) AS entry_at,\n bd.bomber_id,\n date(bd.out_at) AS out_at\n FROM\n bomber.dispatch_app_history bd\n WHERE\n (\n out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)\n OR out_at IS NULL\n )\n AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)\n AND partner_id IS NULL\n AND NOT EXISTS (\n SELECT\n 1\n FROM\n bomber.call_actions bc\n WHERE\n bd.bomber_id = bc.bomber_id\n AND 
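# --- Editorial sketch (illustrative only) ------------------------------------
# get_calltime_avg folds auto and manual rows into {bomber: [talk_sum, cnt]}
# and divides with a zero-count guard; get_no_calltime_avg applies the same
# guard. The two pieces, factored out (helper names are ours):
from collections import defaultdict


def _fold_talktime(rows):
    """rows of (bomber_id, talk_sum, cnt) -> {bomber_id: [talk_sum, cnt]}."""
    acc = defaultdict(lambda: [0, 0])
    for bomber_id, talk_sum, cnt in rows:
        acc[bomber_id][0] += talk_sum
        acc[bomber_id][1] += cnt
    return acc


def _safe_avg(total, cnt):
    """Average that returns 0 instead of dividing by zero."""
    return total / cnt if cnt else 0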
bc.application_id = bd.application_id\n AND bc.created_at < '%(begin_date)s'\n )\n ) a\n GROUP BY\n 1\n \"\"\" % {'begin_date': begin_date}\n data = run_all_sql(sql)\n\n result = defaultdict(int)\n for d in data:\n result[d[0]] += d[1]\n\n bomber_list = []\n for key, value in result.items():\n bomber_list.append(key)\n (SummaryBomber.update(\n unfollowed_cnt=SummaryBomber.new_case_cnt + value\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == key\n )).execute()\n\n # 剩下bomber_id直接由new_case_cnt赋值\n (SummaryBomber.update(\n unfollowed_cnt=SummaryBomber.new_case_cnt\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id.not_in(bomber_list)\n )).execute()\n\n\n# 未跟进件中当天跟进件数\ndef get_unfollowed_call(begin_date):\n sql = \"\"\"\n SELECT\n bomber_id,\n count(1)\n FROM\n (\n SELECT\n bd.application_id,\n date(bd.entry_at) AS entry_at,\n bd.bomber_id,\n date(bd.out_at) AS out_at\n FROM\n bomber.dispatch_app_history bd\n WHERE\n (\n out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)\n OR out_at IS NULL\n )\n AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)\n AND partner_id IS NULL\n AND NOT EXISTS (\n SELECT\n 1\n FROM\n bomber.call_actions bc\n WHERE\n bd.bomber_id = bc.bomber_id\n AND bc.application_id = bd.application_id\n AND bc.created_at < '%(begin_date)s'\n )\n ) a\n WHERE\n EXISTS (\n SELECT\n 1\n FROM\n bomber.call_actions bc\n WHERE\n a.application_id = bc.application_id\n AND a.bomber_id = bc.bomber_id\n AND bc.created_at > '%(begin_date)s'\n AND bc.created_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n AND bc.created_at >= a.entry_at\n )\n OR EXISTS (\n SELECT\n 1\n FROM\n bomber.application ba\n WHERE\n ba.id = a.application_id\n AND ba.finished_at > '%(begin_date)s'\n AND ba.finished_at < date_add('%(begin_date)s', INTERVAL 1 DAY)\n )\n GROUP BY\n 1\n \"\"\" % {'begin_date': begin_date}\n data = run_all_sql(sql)\n\n result = defaultdict(int)\n for d in data:\n result[d[0]] += d[1]\n\n bomber_list = []\n for key, value in result.items():\n bomber_list.append(key)\n (SummaryBomber.update(\n unfollowed_call_cnt=SummaryBomber.new_case_call_cnt + value\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == key\n )).execute()\n\n # 剩下bomber_id直接由new_case_cnt赋值\n update_sql = (SummaryBomber\n .update(unfollowed_call_cnt=SummaryBomber.new_case_call_cnt)\n .where(SummaryBomber.time == begin_date))\n if bomber_list:\n update_sql = update_sql.where(SummaryBomber.bomber_id\n .not_in(bomber_list))\n update_sql.execute()\n return result\n\n\n# summary 更新新的数据(计算summary_bomber的另一部分数据)\n@action(MessageAction.UPDATE_SUMMARY_NEW)\ndef update_summary_new(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n\n worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))\n .where(WorkerLog.created_at >= end_date,\n WorkerLog.action == 'UPDATE_SUMMARY_NEW')\n .first())\n if worker_log and worker_log.logs >= 5:\n return\n\n get_new_case_amount(begin_date, end_date)\n get_kp_cleared(begin_date, end_date)\n get_kp_today(begin_date, end_date)\n get_ptp_cnt(begin_date, end_date)\n get_ptp_call_cnt(begin_date, end_date)\n get_new_case_cleared(begin_date, end_date)\n get_new_case_call(begin_date, end_date)\n get_calltime_avg(begin_date, end_date)\n get_no_calltime_avg(begin_date, end_date)\n get_calltime_sum(begin_date, end_date)\n\n\n# -------------------------------- 得到cycle层的数据 --------------------------\ndef 
get_cycle_claimed(begin_date, end_date):\n sql = \"\"\"\n select cycle,count(1)\n from bomber.application where cycle in (1,2,3,4)\n and (finished_at is null or (finished_at>'%s'))\n and created_at>'2018-09-01'\n group by 1\n \"\"\" % begin_date\n result = run_all_sql(sql)\n return result\n\n\n# 得到cycle层的新件件数和金额\n@time_logger\ndef cycle_new_case(begin_date, end_date, real_time_query=False):\n sql = \"\"\"\n SELECT\n 1 AS cycle,\n count( ba.id ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.created_at ) = bo.which_day_overdue \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c1b_entry ) = bo.which_day_overdue\n WHERE\n c1b_entry > '%s' \n AND c1b_entry < '%s' \n UNION\n SELECT\n 3 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c2_entry ) = bo.which_day_overdue \n WHERE\n c2_entry > '%s' \n AND c2_entry < '%s' \n UNION\n SELECT\n 4 AS cycle,\n count( 1 ),\n sum( bo.principal_pending + late_fee_pending + \n interest_pending ) AS pending \n FROM\n bomber.application ba\n INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id \n AND date( ba.c3_entry ) = bo.which_day_overdue\n WHERE\n c3_entry > '%s' \n AND c3_entry < '%s'\n \"\"\" % (begin_date, end_date, begin_date, end_date,\n begin_date, end_date, begin_date, end_date)\n all_datas = run_all_sql(sql)\n\n if real_time_query:\n return all_datas\n\n for data in all_datas:\n (SummaryBomber.update(\n new_case_amount_sum=data[2],\n new_case_cnt=data[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.bomber_id == data[0],\n SummaryBomber.cycle == data[0]\n )).execute()\n return all_datas\n\n\n# 新件当日维护件数\n@time_logger\ndef get_cycle_new_case_call(begin_date, end_date, real_time_query=False):\n sql = \"\"\"\n SELECT\n 1 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.created_at ) = date( bc.created_at ) \n WHERE\n ba.created_at > '%s' \n AND ba.created_at < '%s' \n UNION\n SELECT\n 2 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c1b_entry ) = date( bc.created_at ) \n WHERE\n ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s'\n UNION\n SELECT\n 3 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c2_entry ) = date( bc.created_at ) \n WHERE\n ba.c2_entry > '%s' \n AND ba.c2_entry < '%s'\n UNION\n SELECT\n 4 AS cycle,\n count( DISTINCT ba.id ) \n FROM\n bomber.application ba\n INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id \n AND date( ba.c3_entry ) = date( bc.created_at ) \n WHERE\n ba.c3_entry > '%s' \n AND ba.c3_entry < '%s'\n \"\"\" % (begin_date, end_date, begin_date, end_date,\n begin_date, end_date, begin_date, end_date)\n cycle_datas = run_all_sql(sql)\n\n if real_time_query:\n return cycle_datas\n\n for data in cycle_datas:\n (SummaryBomber.update(\n 
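# --- Editorial sketch (illustrative only) ------------------------------------
# The four UNION arms in cycle_new_case and get_cycle_new_case_call differ
# only in the cycle label and the entry timestamp column they match against.
# The correspondence, spelled out for reference (constant name is ours):

_CYCLE_ENTRY_COLUMN = {
    1: 'created_at',   # cycle 1 enters collection the day it is created
    2: 'c1b_entry',
    3: 'c2_entry',
    4: 'c3_entry',
}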
new_case_call_cnt=data[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.cycle == data[0],\n SummaryBomber.bomber_id == data[0]\n )).execute()\n return cycle_datas\n\n\ndef get_cycle_new_case_cleared(begin_date, end_date):\n sql = \"\"\"\n SELECT\n '1' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.created_at ) = date( br.repay_at ) \n WHERE ba.created_at > '%s' \n AND ba.created_at < '%s' \n GROUP BY 1, 2 ) a \n UNION\n SELECT \n '2' AS cycle, count( DISTINCT id ), \n sum( paid_amount ) AS pending \n FROM\n (SELECT ba.id, br.repay_at, \n br.late_fee_part + br.principal_part AS paid_amount \n FROM\n bomber.application ba\n INNER JOIN bomber.repayment_log br ON ba.id = br.application_id \n AND date( ba.c1b_entry ) = date( br.repay_at ) \n WHERE ba.c1b_entry > '%s' \n AND ba.c1b_entry < '%s' \n GROUP BY 1, 2) a\n \"\"\" % (begin_date, end_date, begin_date, end_date)\n cycle_cleared = run_all_sql(sql)\n\n for i in cycle_cleared:\n (SummaryBomber.update(\n new_case_cleared_sum=i[2]\n ).where(\n SummaryBomber.cycle == i[0],\n SummaryBomber.bomber_id == i[0],\n SummaryBomber.time == begin_date\n )).execute()\n\n\ndef get_cycle_case_made_cnt(begin_date, end_date):\n sql = \"\"\"\n select cycle,count(distinct application) from (\n select distinct cycle,application from bomber.auto_call_list_record\n where created_at >= '%s'\n and created_at < '%s'\n and called_counts <> 0\n and cycle in (1,2,3,4)\n union\n select distinct cycle,application_id from bomber.call_actions\n where created_at >= '%s'\n and created_at < '%s'\n and cycle in (1,2,3,4)\n ) c\n group by 1\n \"\"\" % (begin_date, end_date, begin_date, end_date)\n case_made_datas = run_all_sql(sql)\n\n for case_made_data in case_made_datas:\n (SummaryBomber.update(\n case_made_cnt=case_made_data[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.cycle == case_made_data[0],\n SummaryBomber.bomber_id == case_made_data[0]\n )).execute()\n\n\n# 得到cycle維度的数据\n@action(MessageAction.SUMMARY_NEW_CYCLE)\ndef summary_new_cycle(payload, msg_id):\n end_date = date.today()\n begin_date = end_date - timedelta(days=1)\n\n worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))\n .where(WorkerLog.created_at >= end_date,\n WorkerLog.action == 'SUMMARY_NEW_CYCLE')\n .first())\n if worker_log and worker_log.logs >= 5:\n return\n\n cycle_datas = (SummaryBomber\n .select(fn.SUM(SummaryBomber.new_case_amount_sum)\n .alias('new_case_amount_sum'),\n fn.SUM(SummaryBomber.new_case_cleared_sum)\n .alias('new_case_cleared_sum'),\n fn.SUM(SummaryBomber.case_made_cnt)\n .alias('case_made_cnt'),\n fn.SUM(SummaryBomber.case_connect_cnt)\n .alias('case_connect_cnt'),\n fn.SUM(SummaryBomber.promised_cnt)\n .alias('promised_cnt'),\n fn.SUM(SummaryBomber.promised_amount)\n .alias('promised_amount'),\n fn.SUM(SummaryBomber.cleared_cnt)\n .alias('cleared_cnt'),\n fn.SUM(SummaryBomber.cleared_amount)\n .alias('cleared_amount'),\n fn.SUM(SummaryBomber.new_case_cnt)\n .alias('new_case_cnt'),\n fn.SUM(SummaryBomber.new_case_call_cnt)\n .alias('new_case_call_cnt'),\n fn.SUM(SummaryBomber.unfollowed_cnt)\n .alias('unfollowed_cnt'),\n fn.SUM(SummaryBomber.unfollowed_call_cnt)\n .alias('unfollowed_call_cnt'),\n fn.SUM(SummaryBomber.call_cnt).alias('call_cnt'),\n fn.SUM(SummaryBomber.sms_cnt).alias('sms_cnt'),\n 
fn.SUM(SummaryBomber.call_connect_cnt)\n .alias('call_connect_cnt'),\n fn.SUM(SummaryBomber.ptp_today_cnt)\n .alias('ptp_today_cnt'),\n fn.SUM(SummaryBomber.ptp_today_call_cnt)\n .alias('ptp_today_call_cnt'),\n fn.SUM(SummaryBomber.ptp_next_cnt)\n .alias('ptp_next_cnt'),\n fn.SUM(SummaryBomber.ptp_next_call_cnt)\n .alias('ptp_next_call_cnt'),\n fn.SUM(SummaryBomber.KP_cleared_cnt)\n .alias('KP_cleared_cnt'),\n fn.SUM(SummaryBomber.KP_today_cnt)\n .alias('KP_today_cnt'),\n fn.SUM(SummaryBomber.work_ind).alias('work_ind'),\n fn.SUM(SummaryBomber.calltime_sum)\n .alias('calltime_sum'),\n fn.SUM(SummaryBomber.calltime_case_sum)\n .alias('calltime_case_sum'),\n fn.SUM(SummaryBomber.calltime_case_cnt)\n .alias('calltime_case_cnt'),\n fn.SUM(SummaryBomber.calltime_no_case_sum)\n .alias('calltime_no_case_sum'),\n fn.SUM(SummaryBomber.calltime_no_case_cnt)\n .alias('calltime_no_case_cnt'),\n SummaryBomber.cycle.alias('cycle'))\n .where(SummaryBomber.time == begin_date,\n SummaryBomber.cycle << Cycle.values())\n .group_by(SummaryBomber.cycle))\n\n for cycle_data in cycle_datas:\n SummaryBomber.create(\n bomber_id=cycle_data.cycle,\n time=begin_date,\n cycle=cycle_data.cycle,\n new_case_amount_sum=cycle_data.new_case_amount_sum, # 新件金额(同上)\n new_case_cleared_sum=cycle_data.new_case_cleared_sum, # 新件还款(同上)\n new_case_cleard_rate=0,\n case_made_cnt=cycle_data.case_made_cnt, # 拨打件数\n case_made_rate=0,\n case_connect_cnt=cycle_data.case_connect_cnt, # 接通件数\n case_connect_rate=0,\n promised_cnt=cycle_data.promised_cnt, # ptp件数\n promised_amount=cycle_data.promised_amount, # ptp金额\n cleared_cnt=cycle_data.cleared_cnt, # 回款件数\n cleared_amount=cycle_data.cleared_amount, # 回款金额\n new_case_cnt=cycle_data.new_case_cnt, # 新件数量(1,2待算)\n new_case_call_cnt=cycle_data.new_case_call_cnt, # 新件拨打数(同上)\n unfollowed_cnt=cycle_data.unfollowed_cnt,\n unfollowed_call_cnt=cycle_data.unfollowed_call_cnt,\n call_cnt=cycle_data.call_cnt, # 拨打电话数\n sms_cnt=cycle_data.sms_cnt, # 发送短信数\n call_connect_cnt=cycle_data.call_connect_cnt, # 接通电话数\n calltime_case_avg=0, # 接通件均通话时长 (全部待算)\n ptp_today_cnt=cycle_data.ptp_today_cnt, # 当日ptp件数\n ptp_today_call_cnt=cycle_data.ptp_today_call_cnt, # 当日ptp到期维护件数\n ptp_next_cnt=cycle_data.ptp_next_cnt, # 次日ptp到期数\n ptp_next_call_cnt=cycle_data.ptp_next_call_cnt, # 次日到期维护数\n KP_cleared_cnt=cycle_data.KP_cleared_cnt, # kp回款件\n KP_today_cnt=cycle_data.KP_today_cnt, # 当日处于ptp件数\n KP_cleared_rate=0,\n work_ind=cycle_data.work_ind, # 当日是否工作\n calltime_sum=cycle_data.calltime_sum, # 通话总时长\n calltime_case_sum=cycle_data.calltime_case_sum,\n calltime_case_cnt=cycle_data.calltime_case_cnt,\n calltime_no_case_sum=cycle_data.calltime_no_case_sum,\n calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,\n work_time_sum=cycle_data.work_time_sum # 工作时长\n )\n\n cycle_claimed = get_cycle_claimed(begin_date, end_date)\n for claimed in cycle_claimed:\n (SummaryBomber.update(\n claimed_cnt=claimed[1]\n ).where(\n SummaryBomber.time == begin_date,\n SummaryBomber.cycle == claimed[0],\n SummaryBomber.bomber_id == claimed[0]\n )).execute()\n\n # 得到新件件数和金额\n cycle_new_case(begin_date, end_date)\n\n # 得到新件维护件数\n get_cycle_new_case_call(begin_date, end_date)\n\n # 得到新件還款金額\n get_cycle_new_case_cleared(begin_date, end_date)\n\n # 修改cycle的拨打件数(累加对于预测试外呼都是打通的)\n get_cycle_case_made_cnt(begin_date, end_date)\n\n # 得到计算类数据(各比率)\n all_datas = (SummaryBomber.filter(SummaryBomber.time == begin_date))\n for data in all_datas:\n cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum\n if data.new_case_amount_sum else 0) * 
100\n data.new_case_cleard_rate = cl_rat\n\n case_made_rate = (data.case_made_cnt / data.claimed_cnt\n if data.claimed_cnt else 0) * 100\n data.case_made_rate = case_made_rate\n\n case_connect_rate = (data.case_connect_cnt / data.case_made_cnt\n if data.case_made_cnt else 0) * 100\n data.case_connect_rate = case_connect_rate\n\n calltime_case_avg = (data.calltime_case_sum / data.calltime_case_cnt\n if data.calltime_case_cnt else 0)\n data.calltime_case_avg = calltime_case_avg\n\n calltime_no_case_avg = (data.calltime_no_case_sum /\n data.calltime_no_case_cnt\n if data.calltime_no_case_cnt else 0)\n data.calltime_no_case_avg = calltime_no_case_avg\n\n KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt\n if data.KP_today_cnt else 0) * 100\n data.KP_cleared_rate = KP_cleared_rate\n\n data.save()\n\n\n@action(MessageAction.MODIFY_BILL)\ndef modify_bill(payload, msg_id):\n application_id = payload.get('external_id')\n principal_paid = Decimal(payload.get('principal_paid', 0))\n late_fee = Decimal(payload.get('late_fee', 0))\n late_fee_paid = Decimal(payload.get('late_fee_paid', 0))\n overdue_days = payload.get('overdue_days')\n sub_bill_id = payload.get('bill_sub_id')\n partner_bill_id = payload.get('partner_bill_id')\n if not application_id:\n logging.warning('payload has no external_id. {}'.format(str(payload)))\n return\n if not overdue_days:\n logging.info(\"application %s not overdue\" % application_id)\n return\n\n item = (OldLoanApplication\n .get_or_none(OldLoanApplication.application_id ==\n application_id))\n if item:\n start_old_application(item, cancel=True)\n\n overdue_bill = (OverdueBill.select()\n .where(OverdueBill.external_id == application_id,\n OverdueBill.sub_bill_id == sub_bill_id)\n .first())\n application = (Application.filter(Application.id == application_id)\n .first())\n if not overdue_bill:\n if not application:\n logging.info('application %s not in bomber, let it in bomber now',\n application_id)\n send_to_default_q(MessageAction.APPLICATION_BOMBER, {\n 'id': application_id,\n 'bill_sub_id': sub_bill_id\n })\n return\n else:\n application = (Application\n .filter(Application.id == overdue_bill.collection_id)\n .first())\n\n with db.atomic():\n application.status = ApplicationStatus.UNCLAIMED.value\n application.finished_at = None\n application.paid_at = None\n application.save()\n if overdue_bill:\n overdue_bill.status = ApplicationStatus.UNCLAIMED.value\n overdue_bill.finished_at = None\n overdue_bill.save()\n repayment = (RepaymentLog.update(no_active = 1)\n .where(RepaymentLog.application == application.id,\n RepaymentLog.partner_bill_id == partner_bill_id,\n RepaymentLog.overdue_bill_id == overdue_bill.id))\n else:\n repayment = (RepaymentLog.update(no_active=1)\n .where(RepaymentLog.application == application.id,\n RepaymentLog.partner_bill_id == partner_bill_id))\n repayment_num = repayment.execute()\n logging.info(\"modify_bill no active repayment count:%s\" % repayment_num)\n\n if not application.latest_bomber_id:\n return\n\n bomber_id = application.latest_bomber_id\n (DispatchAppHistory.update(\n out_at=None,\n out_overdue_days=overdue_days,\n out_principal_pending=(application.amount - principal_paid),\n out_late_fee_pending=(late_fee - late_fee_paid)\n ).where(\n DispatchAppHistory.application == application.id,\n DispatchAppHistory.bomber_id == bomber_id)).execute()\n\n\n# 获取改变的ids\ndef get_change_bomber():\n cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value, 8: Cycle.C3.value}\n result = {}\n bomber_logs = 
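# --- Editorial sketch (illustrative only) ------------------------------------
# Every derived rate in the summary_new_cycle loop above is the same guarded
# percentage; factored out (hypothetical helper, the loop inlines this):


def _pct(numerator, denominator):
    """numerator / denominator * 100, or 0 when the denominator is empty."""
    return (numerator / denominator if denominator else 0) * 100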
(BomberLog.select(BomberLog.bomber_id,\n BomberLog.role_id,\n BomberLog.operation,\n Bomber.group_id)\n .join(Bomber, JOIN_INNER,\n on=BomberLog.bomber_id == Bomber.id)\n .where(fn.DATE(BomberLog.created_at) == date.today(),\n BomberLog.role_id << list(cycle_role_map.keys()),#C1b,c2,c3\n BomberLog.operation << (0, 1), #0删除,1创建,3修改\n Bomber.instalment == 0) #催收单期的员工\n .dicts())\n for b_log in bomber_logs:\n cycle = cycle_role_map.get(b_log[\"role_id\"])\n group_id = b_log[\"group_id\"]\n if cycle in result:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {\n \"cycle\": cycle,\n \"del_ids\": [],\n \"new_ids\": []\n }\n else:\n result[cycle] = {group_id: {\n \"cycle\": cycle,\n \"del_ids\": [],\n \"new_ids\": []}\n }\n if b_log[\"operation\"] == 0:\n result[cycle][group_id][\"del_ids\"].append(b_log[\"bomber_id\"])\n # result 有值表示有人员变动\n if result:\n bombers = (Bomber.select()\n .where(Bomber.role.in_(list(cycle_role_map.keys())),\n Bomber.is_del == 0,\n Bomber.instalment == 0))\n for b in bombers:\n cycle_result = result.get(cycle_role_map[b.role_id], {})\n role_result = cycle_result.get(b.group_id)\n if not role_result:\n continue\n role_result[\"new_ids\"].append(b.id)\n resutl_list = []\n for cycle, group_dict in result.items():\n resutl_list.extend(list(group_dict.values()))\n return resutl_list\n return []\n\n\n# 获取所有的application\ndef get_total_application(cycle, del_ids, new_ids,\n type=ApplicationType.CASH_LOAN.value):\n bomber_list = del_ids + new_ids\n all_apps = (Application.select(Application.id,\n Application.latest_bomber_id.alias(\n \"latest_bomber_id\"),\n Application.promised_date,\n Bomber.partner_id.alias(\"partner_id\"))\n .join(Bomber, JOIN_LEFT_OUTER,\n Application.latest_bomber == Bomber.id)\n .where(Application.cycle == cycle,\n Application.status != ApplicationStatus.REPAID.value,\n Application.latest_bomber_id << bomber_list,\n Application.type == type)\n .order_by(Application.id)\n .dicts())\n return all_apps\n\n\n# 获取平均数列表,即每个bomber的平均件的数量\ndef get_average_number(app_nums, bomber_nums):\n average = app_nums // bomber_nums\n remainder = app_nums % bomber_nums\n average_list = [average for i in range(bomber_nums)]\n if remainder == 0:\n return average_list\n for i in range(remainder):\n average_list[i] += 1\n # 对结果进行一下随机,不然每次都是前几个人多件\n random.shuffle(average_list)\n return average_list\n\n\n# 对appliciton进行分类统计\ndef classified_statistic_apps(apps):\n result = {}\n # 根据用户的bomber_id 对数据进行分类统计\n for app in apps:\n # 将用户下p和没下p的件分开\n latest_bomber_id = app[\"latest_bomber_id\"]\n if latest_bomber_id not in result:\n result[latest_bomber_id] = {\n \"bid\":latest_bomber_id,\n \"p_list\": [],\n \"np_list\": [],\n \"partner_id\": app[\"partner_id\"] if app[\"partner_id\"] else \"\",\n }\n promised_date = app.get(\"promised_date\")\n if not promised_date or promised_date.date() < date.today():\n result[latest_bomber_id]['np_list'].append(app[\"id\"])\n else:\n result[latest_bomber_id]['p_list'].append(app[\"id\"])\n return result\n\n\n# 获取多余的件,并且计算每个人所需要的件\ndef get_surplus_application(new_ids, del_ids, average_nums, classified_apps):\n surplus_apps = []\n # 如果id在删除队列中,将对应id所有的件重新分配\n for del_id in del_ids:\n del_res = classified_apps.get(del_id,{})\n p_list = del_res.get(\"p_list\", [])\n np_list = del_res.get(\"np_list\", [])\n del_res[\"need_num\"] = -(len(p_list) + len(np_list))\n del_res[\"to_list\"] = np_list + p_list\n surplus_apps.extend(p_list)\n surplus_apps.extend(np_list)\n # 计算每个用户的下p和没下p的件的个数,和自己需要的件的个数\n for index, bid in enumerate(new_ids):\n 
average = average_nums[index]\n bomber_app = classified_apps.get(bid)\n if not bomber_app:\n # 获取partner_id\n bomber = (Bomber.select(Bomber.partner_id)\n .where(Bomber.id == bid)\n .first())\n bomber_app = {\n \"bid\": bid,\n \"p_list\": [],\n \"p_num\": 0,\n \"np_list\": [],\n \"np_num\": 0,\n \"need_num\": average,\n \"partner_id\": bomber.partner_id if bomber else ''\n }\n classified_apps[bid] = bomber_app\n else:\n p_num = len(bomber_app[\"p_list\"])\n np_num = len(bomber_app[\"np_list\"])\n # 如果下p件大于平均值,直接将他剩余所有件都放入到多余列表中\n if p_num > average:\n bomber_app[\"need_num\"] = - np_num\n else:\n bomber_app[\"need_num\"] = average - (p_num + np_num)\n bomber_app[\"p_num\"] = p_num\n bomber_app[\"np_num\"] = np_num\n # 将多余的件放入到多余列表中\n if bomber_app[\"need_num\"] < 0:\n # 将件随机,确保分件的逾期天数尽量均匀\n random.shuffle(bomber_app[\"np_list\"])\n res_over = bomber_app[\"np_list\"][:-bomber_app[\"need_num\"]]\n bomber_app[\"to_list\"] = res_over\n surplus_apps.extend(res_over)\n # 按照need_num进行排序\n classified_apps_list = sorted(classified_apps.values(),\n key=lambda x:x[\"need_num\"],\n reverse=True)\n return surplus_apps, classified_apps_list\n\n\n# 更新数据库数据,进行分件\ndef update_applications(surplus_apps, classified_apps, cycle):\n # 多余得件进行随机\n random.shuffle(surplus_apps)\n for app in classified_apps:\n status = 0\n try:\n if app[\"need_num\"] > 0:\n from_list = surplus_apps[:app[\"need_num\"]]\n # 移除surplus_apps中的元素\n for i in from_list: surplus_apps.remove(i)\n app[\"from_list\"] = from_list\n with db.atomic():\n q = Application.update(\n {Application.latest_bomber_id: app[\"bid\"]}).where(\n Application.id.in_(from_list))\n q.execute()\n # 分件入案\n in_record_params = {\n \"dest_bomber_id\": app[\"bid\"],\n \"application_ids\": from_list,\n \"dest_partner_id\": app[\"partner_id\"],\n \"cycle\": cycle,\n }\n new_in_record(**in_record_params)\n status = 1\n elif app[\"need_num\"] < 0:\n #分件出案\n out_record_params = {\n \"src_bomber_id\": app[\"bid\"],\n \"application_ids\": app[\"to_list\"]\n }\n new_out_record(**out_record_params)\n status = 1\n else:\n status = 1\n except Exception as e:\n logging.error(\"分件异常,params:%s,error:%s\"%(app,str(e)))\n #记录操作日志\n log_params = {\n \"bomber_id\": app[\"bid\"],\n \"form_ids\": json.dumps(app.get(\"from_list\", [])),\n \"to_ids\": json.dumps(app.get(\"to_list\", [])),\n \"need_num\": app.get(\"need_num\"),\n \"np_ids\": json.dumps(app.get(\"np_list\", [])),\n \"p_ids\": json.dumps(app.get(\"p_list\", [])),\n \"status\": status\n }\n DispatchAppLogs.create(**log_params)\n return classified_apps\n\n\n# 人员变动分配分期的催收单\ndef get_instalment_change_bomber():\n result ={}\n bomber_logs = (BomberLog.select(BomberLog.bomber_id,\n BomberLog.operation,\n Bomber.instalment,\n Bomber.group_id)\n .join(Bomber, JOIN_INNER,\n on=BomberLog.bomber_id == Bomber.id)\n .where(fn.DATE(BomberLog.created_at) == date.today(),\n BomberLog.operation << [0,1],\n Bomber.instalment > 0)\n .dicts())\n for bl in bomber_logs:\n cycle = bl[\"instalment\"]\n group_id = bl[\"group_id\"]\n if cycle not in result:\n result[cycle] = {group_id: {\n \"cycle\": cycle,\n \"del_ids\": [],\n \"new_ids\": []\n }}\n else:\n if group_id not in result[cycle]:\n result[cycle][group_id] = {\n \"cycle\": cycle,\n \"del_ids\": [],\n \"new_ids\": []}\n if bl[\"operation\"] == 0:\n result[cycle][group_id][\"del_ids\"].append(bl[\"bomber_id\"])\n if result:\n instalments = list(result.keys())\n bombers = (Bomber.select()\n .where(Bomber.instalment << instalments,\n Bomber.is_del == 0))\n for b in bombers:\n cycle_result = 
result.get(b.instalment, {})\n group_result = cycle_result.get(b.group_id)\n if not group_result:\n continue\n group_result[\"new_ids\"].append(b.id)\n result_list = []\n for cycle,group_dict in result.items():\n result_list.extend(list(group_dict.values()))\n return result_list\n return []\n\ndef instalment_update_applications(surplus_apps, classified_apps, cycle):\n end = 0\n for app in classified_apps:\n if app[\"need_num\"] <= 0:\n continue\n start = end\n end = start + app[\"need_num\"]\n aids = surplus_apps[start:end]\n app[\"from_list\"] = aids\n status = 0\n with db.atomic():\n q = (Application.update(last_bomber = Application.latest_bomber,\n latest_bomber = app[\"bid\"],\n ptp_bomber = None)\n .where(Application.id << aids)\n .execute())\n # 入案和出案\n record_param = {\n \"cycle\": cycle,\n \"application_ids\": aids,\n \"dest_bomber_id\": app[\"bid\"],\n \"dest_partner_id\": app[\"partner_id\"],\n }\n out_and_in_record_instalment(**record_param)\n status = 1\n # 记录操作日志\n log_params = {\n \"bomber_id\": app[\"bid\"],\n \"form_ids\": json.dumps(app.get(\"from_list\", [])),\n \"to_ids\": json.dumps(app.get(\"to_list\", [])),\n \"need_num\": app.get(\"need_num\"),\n \"np_ids\": json.dumps(app.get(\"np_list\", [])),\n \"p_ids\": json.dumps(app.get(\"p_list\", [])),\n \"status\": status\n }\n DispatchAppLogs.create(**log_params)\n return classified_apps\n\n# 执行人员变动分件\ndef change_bomber_dispatch_apps(change_bombers,\n type=ApplicationType.CASH_LOAN.value):\n if not change_bombers:\n return\n for bombers in change_bombers:\n del_ids = bombers.get(\"del_ids\", [])\n new_ids = bombers.get(\"new_ids\", [])\n cycle = bombers.get(\"cycle\")\n if not all([new_ids, cycle]):\n logging.info(\n \"获取需要分件的信息异常,bomber:%s,type:%s\" % (bombers, type))\n continue\n # 获取总apps\n apps = get_total_application(cycle, del_ids, new_ids, type)\n if not apps:\n logging.info(\n \"分件没有获取到对应的件,bomber:%s,type:%s\" % (bombers, type))\n continue\n # 获取平均数列表\n average_nums = get_average_number(len(apps), len(new_ids))\n # 分类统计apps\n classified_apps = classified_statistic_apps(apps)\n # 计算每个人需要分的件和多余的件\n superlus_apps, classified_apps = get_surplus_application(new_ids,\n del_ids,\n average_nums,\n classified_apps)\n # 分件,更新数据库\n if type == ApplicationType.CASH_LOAN.value:\n result = update_applications(superlus_apps, classified_apps, cycle)\n elif type == ApplicationType.CASH_LOAN_STAGING.value:\n result = instalment_update_applications(superlus_apps,\n classified_apps,\n cycle)\n else:\n logging.info(\"人员变动触发分件,unknown type:%s\" % type)\n\n logging.info(\"人员变动触发的分件:result:%s,type:%s\" % (result, type))\n\n\n#bomber人员变动,进行分件\n@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)\ndef bomber_dispatch_applications(payload, msg_id):\n #通过当天的登录日志,判断人员变动,若删除bomber_log会记录\n change_bombers = get_change_bomber()\n instalment_change_bombers = get_instalment_change_bomber()\n params = {ApplicationType.CASH_LOAN.value: change_bombers,\n ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}\n for type,bombers in params.items():\n change_bomber_dispatch_apps(change_bombers=bombers,type=type)\n\n\n@action(MessageAction.REPAIR_BOMBER)\ndef repair_bomber(payload, msg_id):\n app_mobile = payload['app_mobile']\n username = payload.get('user_name')\n logging.info('start repair bomber, number: %s' % app_mobile)\n\n # 得到用户填写的EC,确认该EC号码是否在催收中,并存储关系\n if 'mobile_no' in payload and payload['mobile_no']:\n mobile = number_strip(str(payload['mobile_no']))[:64]\n name = payload.get('mobile_name')\n application = 
Application.filter(Application.user_mobile_no == mobile)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, mobile, username, name)\n\n if 'tel_no' in payload and payload['tel_no']:\n tel_no = number_strip(str(payload['tel_no']))[:64]\n name = payload.get('tel_name')\n application = Application.filter(Application.user_mobile_no == tel_no)\n if application.exists():\n repair_contact(app_mobile, application, username)\n add_relationship(app_mobile, tel_no, username, name)\n\n\ndef repair_contact(number, application, name):\n # 填写的ec有过逾期则将号码加入contact中\n application = application.first()\n contact = (Contact\n .filter(Contact.user_id == application.user_id,\n Contact.number == number))\n if not contact.exists():\n Contact.create(\n user_id=application.user_id,\n name=name,\n number=number,\n relationship=Relationship.FAMILY.value,\n source='repair ec',\n real_relationship=Relationship.FAMILY.value\n )\n logging.info('add repair contact success, number: %s' % number)\n\n\ndef add_relationship(number, ec_number, username, name):\n # 存储关系\n query = (TotalContact\n .objects(src_number=str(number),\n dest_number=ec_number,\n source=20,\n is_calc=False\n )\n .first())\n if not query:\n TotalContact(\n src_number=str(number),\n src_name=username,\n dest_number=ec_number,\n dest_name=name,\n source=20).save()\n logging.info('add relationship success, number: %s' % number)\n\n\n# 获取要统计的时间范围\ndef get_summary_daily_time():\n mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')\n mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')\n now_date = datetime.now()\n now_date_time = now_date.time()\n today_str = str(now_date.date())\n if now_date_time < mid_time_t1.time():\n yes_date = now_date - timedelta(days=1)\n yes_date_str = str(yes_date.date())\n begin_str = yes_date_str + ' 17:20:00'\n end_str = today_str + ' 00:00:00'\n elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():\n begin_str = today_str + ' 00:00:00'\n end_str = today_str + ' 12:40:00'\n else:\n begin_str = today_str + ' 12:40:00'\n end_str = today_str + ' 17:20:00'\n begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')\n end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')\n # 记录统计的是哪天的数据\n summary_datetime = now_date-timedelta(minutes=30)\n summary_date = summary_datetime.date()\n return begin_time, end_time, summary_date\n\n# 每天12:40 和 17:20 和 凌晨 更新当天数据\n@action(MessageAction.SUMMARY_DAILY)\ndef summary_daily_data(payload, msg_id):\n begin_time, end_time, summary_date = get_summary_daily_time()\n call_actions = (CallActionsR.select(CallActionsR.id,\n CallActionsR.bomber_id,\n CallActionsR.application_id,\n CallActionsR.promised_date,\n CallActionsR.cycle,\n CallActionsR.name,\n CallActionsR.number)\n .where(CallActionsR.created_at >= begin_time,\n CallActionsR.created_at < end_time,\n CallActionsR.type << (0,1)))\n summary_dailys = {}\n for call in call_actions:\n if call.bomber_id not in summary_dailys:\n summary_dailys[call.bomber_id] = {'ptp_cnt': 0,\n 'call_cnt': 0,\n 'cycle': call.cycle,\n 'repayment': 0,\n 'bomber_id': call.bomber_id,\n 'summary_date':str(summary_date)}\n\n # C2,C3的下p的件会多一条没有number和name的数据\n if call.name and call.number:\n summary_dailys[call.bomber_id]['call_cnt'] += 1\n\n if call.promised_date:\n summary_dailys[call.bomber_id]['ptp_cnt'] += 1\n\n # 获取回款信息\n C1_sql = \"\"\"\n SELECT a.current_bomber_id,\n sum(principal_part+late_fee_part) as pay_amount,a.cycle\n from \n (select 
a.cycle,a.current_bomber_id,b.username,a.principal_part,\n a.late_fee_part,a.application_id,a.repay_at\n FROM bomber.repayment_log a ,bomber.bomber b\n WHERE a.repay_at >= '%s' AND a.repay_at <'%s'\n AND a.current_bomber_id !=''\n AND a.current_bomber_id = b.id\n and b.role_id in (1,2,4,5)\n and principal_part+late_fee_part>0\n group by 6,7) a\n GROUP BY a.cycle,a.current_bomber_id\n \"\"\" % (begin_time, end_time)\n C1_repayment = run_all_sql(C1_sql)\n other_sql = \"\"\"\n select current_bomber_id,sum(pay_amount) as pay_amount,cycle\n from (\n select application_id,current_bomber_id,pay_amount,repay_at,cycle\n from (\n select br.application_id,br.current_bomber_id,\n br.principal_part+br.late_fee_part as pay_amount,br.repay_at,\n br.cycle\n from bomber.repayment_log br\n left join bomber.bomber bb on br.current_bomber_id=bb.id\n where exists (select 1 from bomber.bombing_history bb \n where br.current_bomber_id=bb.bomber_id \n and br.application_id=bb.application_id \n and bb.created_at<br.repay_at \n and (bb.promised_date is not null \n or bb.promised_amount is not null))\n and br.repay_at >= '%s'\n and br.repay_at < '%s'\n and bb.role_id in (3,6,7,8,9) \n and br.principal_part+br.late_fee_part > 0\n group by 1,4\n ) a\n group by 1,4) b\n group by 1\n \"\"\" % (begin_time, end_time)\n other_repayment = run_all_sql(other_sql)\n all_repayment = C1_repayment + other_repayment\n for res in all_repayment:\n bomber_id,pay_amount,cycle = res\n if bomber_id in summary_dailys:\n summary_dailys[bomber_id]['repayment'] += pay_amount\n else:\n summary_dailys[bomber_id] = {'ptp_cnt': 0,\n 'call_cnt': 0,\n 'cycle': cycle,\n 'repayment': pay_amount,\n 'bomber_id': bomber_id,\n 'summary_date': str(summary_date)\n }\n insert_values = list(summary_dailys.values())\n if insert_values:\n SummaryDaily.insert_many(insert_values).execute()\n\n# 获取本cycle所有没完成的件\ndef get_cycle_all_no_paid_app(cycle, type=None):\n apps = (Application\n .select(Application.id,\n Application.latest_bomber_id,\n Application.ptp_bomber,\n Application.promised_date,\n Application.cycle)\n .where(Application.cycle == cycle,\n Application.status != ApplicationStatus.REPAID.value,\n Application.type == type)\n .dicts())\n\n dis_app_ids = [a['id'] for a in apps]\n # 将dispatch_app中的件状态更新\n with db.atomic():\n for idx in range(0, len(dis_app_ids), 1000):\n ids = dis_app_ids[idx:idx + 1000]\n q = (DispatchApp.update(status = DisAppStatus.ABNORMAL.value)\n .where(DispatchApp.application << ids)\n .execute())\n return apps\n\n# 根据bomber_id整理app\ndef get_app_logs(apps):\n app_logs = {}\n all_np_apps = []\n all_p_apps = []\n for a in apps:\n latest_bomber = a[\"latest_bomber\"]\n # 2 代替催收单中latest_bomber是空的情况,\n latest_bomber = a[\"cycle\"] if not latest_bomber else latest_bomber\n if latest_bomber in app_logs:\n app_logs[latest_bomber][\"to_ids\"].append(a[\"id\"])\n else:\n app_logs[latest_bomber] = {\"bomber_id\": latest_bomber,\n \"to_ids\": [a[\"id\"]],\n \"np_ids\": [],\n \"p_ids\": []}\n if (a[\"promised_date\"] and\n a[\"promised_date\"].date() >= datetime.now().date()):\n app_logs[latest_bomber][\"p_ids\"].append(a[\"id\"])\n all_p_apps.append(a)\n else:\n app_logs[latest_bomber][\"np_ids\"].append(a[\"id\"])\n all_np_apps.append(a)\n return app_logs, all_np_apps, all_p_apps\n\n# 月底分件给外包员工\ndef month_dispatch_app_out_partner(cycle,apps,app_logs,np_apps):\n # 件随机\n apps = list(apps)\n np_apps = list(np_apps)\n random.shuffle(np_apps)\n apps_len = len(apps)\n np_apps_len = len(np_apps)\n end = 0\n all_app_precentage = 0\n # 
获取这个cycle所有的的外包\n partners = (Partner.select()\n .where(Partner.cycle == cycle,\n Partner.status == PartnerStatus.NORMAL.value))\n for p in partners:\n all_app_precentage += p.app_percentage\n\n for partner in partners:\n # 获取外包人员\n bombers = (Bomber.select()\n .where(Bomber.partner == partner.id,\n Bomber.is_del == 0,\n Bomber.status != BomberStatus.OUTER_LEADER.value))\n bids = {b.id:b for b in bombers}\n if len(bids) == 0:\n logging.info(\"cycle:%s,partner:%s,no bomber\"%(cycle, partner.id))\n continue\n start = end\n if np_apps_len >= int(apps_len * all_app_precentage):\n end = start + int(apps_len * partner.app_percentage)\n else:\n end = (start +\n int(np_apps_len * partner.app_percentage / all_app_precentage))\n # 外包团队应该获分到的所有件\n partner_app = np_apps[start:end]\n dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)\n # 剩余给内部员工的件\n np_apps = np_apps[end:]\n return np_apps\n\n\n# 内部员工分\ndef month_dispatch_app_inner(cycle,np_apps,app_logs,p_apps):\n sys_cycle = {1: 'AB_TEST_C1A',\n 2: 'AB_TEST_C1B',\n 3: 'AB_TEST_C2',\n 4: 'AB_TEST_C3'}\n # 获取内容部员工\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = (Bomber.select().where(Bomber.id << sys_values,\n Bomber.is_del == 0))\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n bombers = bombers.where(Bomber.instalment == 0)\n bids = {b.id:b for b in bombers}\n # c1b没有下p的件要进自动外呼\n if cycle == Cycle.C1A.value:\n np_ids = [a[\"id\"] for a in np_apps]\n # 更新没有下p的件\n np = (Application\n .update(status = ApplicationStatus.PROCESSING.value,\n ptp_bomber = None,\n latest_bomber = None)\n .where(Application.id << np_ids)\n .execute())\n bomber_app_logs = app_logs.get(cycle, {})\n # 月底分件的时候,进自动外呼的件也要有入案和出案记录\n out_param = {\n \"application_ids\": bomber_app_logs.get(\"to_ids\", []),\n \"month_dispatch\": 1,\n \"src_bomber_id\": cycle,\n }\n new_out_record(**out_param)\n in_param = {\n \"cycle\": cycle,\n \"application_ids\": np_ids,\n \"dest_bomber_id\": cycle\n }\n new_in_record(**in_param)\n bomber_app_logs[\"need_num\"] = len(np_apps)\n bomber_app_logs[\"form_ids\"] = np_ids\n bomber_app_logs[\"status\"] = 1\n else:\n dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)\n\n dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)\n\n# 把件分给bomber\ndef dispatch_apps_to_bomber(cycle,apps,bids,app_logs,out_partner=True,\n type=ApplicationType.CASH_LOAN.value):\n apps = list(apps)\n random.shuffle(apps)\n # 获取每个人应该分个数\n bids_list = list(bids.keys())\n if len(bids_list) <= 0:\n logging.info(\"get_dispatch_app_to_bomber no bids\")\n return\n average_num = get_average_number(len(apps), len(bids_list))\n bomber_end = 0\n with db.atomic():\n for index, bid in enumerate(bids_list):\n current_bomber = bids.get(bid)\n bomber_app_logs = app_logs.get(bid, {})\n bomber_start = bomber_end\n bomber_end = bomber_start + average_num[index]\n bomber_apps = apps[bomber_start:bomber_end]\n from_p, from_np, from_ids,status = [], [], [], 0\n # 区分员工分到的件,哪些是下p的哪些是没下p的\n for ba in bomber_apps:\n promised_date = ba.get(\"promised_date\")\n from_ids.append(ba[\"id\"])\n if promised_date and promised_date.date() >= date.today():\n from_p.append(ba[\"id\"])\n else:\n from_np.append(ba[\"id\"])\n app_status = ApplicationStatus.AB_TEST.value\n # c1A内部下p的件要特殊状态\n if (cycle == Cycle.C1A.value and not out_partner\n and type == ApplicationType.CASH_LOAN.value):\n app_status = ApplicationStatus.PROCESSING.value\n if from_p:\n p = (Application\n .update(ptp_bomber=bid,\n 
latest_bomber=bid,\n status=app_status)\n .where(Application.id << from_p)\n .execute())\n p_ids = bomber_app_logs.get(\"p_ids\", []) + from_p\n bomber_app_logs[\"p_ids\"] = p_ids\n if from_np:\n np = (Application\n .update(latest_bomber=bid,\n ptp_bomber=None,\n status=ApplicationStatus.AB_TEST.value)\n .where(Application.id << from_np)\n .execute())\n np_ids = bomber_app_logs.get(\"np_ids\", []) + from_np\n bomber_app_logs[\"np_ids\"] = np_ids\n in_param = {\"cycle\": cycle,\n \"dest_partner_id\": current_bomber.partner_id,\n \"application_ids\": from_ids,\n \"dest_bomber_id\": bid,\n }\n if type == ApplicationType.CASH_LOAN.value:\n out_param = {\"src_bomber_id\": bid,\n \"application_ids\": bomber_app_logs.get(\"to_ids\",[]),\n \"month_dispatch\":1\n }\n # 出案\n new_out_record(**out_param)\n # 入案\n new_in_record(**in_param)\n else:\n out_and_in_record_instalment(**in_param)\n bomber_app_logs[\"status\"] = 1\n need_num = bomber_app_logs.get(\"need_num\", 0) + average_num[index]\n bomber_app_logs[\"need_num\"] = need_num\n all_form_ids = bomber_app_logs.get(\"form_ids\", []) + from_ids\n bomber_app_logs[\"form_ids\"] = all_form_ids\n # 如果是内部的分件,不用执行下面的操作\n if not out_partner:\n continue\n # 分给外包的件,要记录在dispatch_app中.将原来的记录删除,在插入新的数据\n try:\n (DispatchApp.delete()\n .where(DispatchApp.application.in_(from_ids))\n .execute())\n dispatch_ins = [{\"application\": id,\n \"partner\": current_bomber.partner_id,\n \"bomber\": bid,\n \"status\": DisAppStatus.NORMAL.value,\n } for id in from_ids]\n (DispatchApp.insert_many(dispatch_ins).execute())\n except Exception as e:\n logging.info(\n \"month_disapp_error error:%s,bid:%s,from_ids:%s\" %\n (str(e), bid, from_ids))\n\n\n# 计算每个件的逾期天数,根据逾期天数更新对应的cycle\ndef calc_instalment_apps_cycle():\n cycle_list = [Cycle.C2.value, Cycle.C3.value]\n for cycle in cycle_list:\n apps = (ApplicationR.select(ApplicationR.id,\n ApplicationR.cycle,\n ApplicationR.overdue_days.alias(\"ods\"),\n ApplicationR.latest_bomber,\n OverdueBillR.status,\n OverdueBillR.overdue_days.alias(\"oods\"))\n .join(OverdueBillR, JOIN_LEFT_OUTER,\n on=ApplicationR.id == OverdueBillR.collection_id)\n .where(ApplicationR.cycle == cycle,\n ApplicationR.type ==\n ApplicationType.CASH_LOAN_STAGING.value,\n ApplicationR.status != ApplicationStatus.REPAID.value)\n .dicts())\n # 计算催收单真实的overdue_days\n lower_apps = {}\n for app in apps:\n if app[\"status\"] == ApplicationStatus.REPAID.value:\n continue\n aid = app[\"id\"]\n if aid in lower_apps:\n lower_apps[aid][\"ods\"] = max(app[\"oods\"], app[\"ods\"])\n else:\n lower_apps[aid] = {\n \"id\": aid,\n \"cycle\": cycle,\n \"ods\": app[\"oods\"],\n }\n # 计算apps的逾期天数和当前cycle是否匹配\n for aid,app in lower_apps.items():\n new_cycle = get_cycle_by_overdue_days(app[\"ods\"])\n if new_cycle != cycle:\n update_param = {\"cycle\":new_cycle,\n \"overdue_days\":app[\"ods\"]}\n entry_time = calc_entry_time(app[\"ods\"])\n update_param.update(entry_time)\n # 更新催收单\n (Application.update(**update_param)\n .where(Application.id == aid)\n .execute())\n\n\n# 降cycle之后根据逾期天数更新以下几个时间\ndef calc_entry_time(overdue_days):\n app_entry_time = {}\n overdue_entry = {\n \"dpd1_entry\": [1, 3],\n \"C1A_entry\": [4, 10],\n \"C1B_entry\": [11, 30],\n \"C2_entry\": [31, 60],\n \"C3_entry\": [61, 90]\n }\n for key,value in overdue_entry.items():\n if value[0] <= overdue_days <= value[1]:\n app_entry_time[key] = datetime.now()\n else:\n app_entry_time[key] = None\n return app_entry_time\n\n# 分期分件\ndef instalment_month_dispatch_app():\n sys_cycle = {1: 'AB_TEST_C1A',\n 2: 
'AB_TEST_C1B',\n 3: 'AB_TEST_C2',\n 4: 'AB_TEST_C3'}\n # 降cycle\n calc_instalment_apps_cycle()\n instalment_cycle_list = Cycle.values()[:4]\n for cycle in instalment_cycle_list:\n apps = get_cycle_all_no_paid_app(cycle,\n ApplicationType.CASH_LOAN_STAGING.value)\n if not apps:\n logging.info(\"instalment_month_dispatch no get apps,cycle:%s\"%cycle)\n continue\n app_logs, all_np_apps, all_p_apps = get_app_logs(apps)\n # 获取要分件的成员\n if cycle == Cycle.C1A.value:\n sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])\n sys_values = json.loads(sys_config.value)\n bombers = (Bomber.select().where(Bomber.id << sys_values,\n Bomber.is_del == 0))\n else:\n bombers = (Bomber.select().where(Bomber.is_del == 0,\n Bomber.instalment == cycle))\n bids = {b.id:b for b in bombers}\n if not bids:\n logging.info(\"instalment_month_dispatch no bomber,cycle:%s\"%cycle)\n continue\n dispatch_apps_to_bomber(cycle = cycle,\n apps = all_p_apps,\n bids = bids,\n app_logs = app_logs,\n out_partner = False,\n type = ApplicationType.CASH_LOAN_STAGING.value)\n if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):\n dispatch_apps_to_bomber(cycle=cycle,\n apps=all_np_apps,\n bids=bids,\n app_logs=app_logs,\n out_partner=False,\n type=ApplicationType.CASH_LOAN_STAGING.value)\n else:\n # 未下p的件要有入案记录\n np_ids = [a[\"id\"] for a in all_np_apps]\n np = (Application.update(status=ApplicationStatus.UNCLAIMED.value,\n ptp_bomber=None,\n latest_bomber=None)\n .where(Application.id << np_ids,\n ApplicationStatus != ApplicationStatus.REPAID.value)\n .execute())\n in_param = {\n \"cycle\": cycle,\n \"application_ids\": np_ids,\n \"dest_bomber_id\": cycle\n }\n out_and_in_record_instalment(**in_param)\n\n # 如果有降cycle的件,也记录在历史记录中\n try:\n dispatch_apps_logs = []\n for bid,app in app_logs.items():\n alg = {\n \"bomber_id\": bid,\n \"need_num\": -len(app.get(\"to_ids\", [])),\n \"form_ids\": json.dumps(app.get(\"form_ids\", [])),\n \"to_ids\": json.dumps(app.get(\"to_ids\", [])),\n \"np_ids\": json.dumps(app.get(\"np_ids\", [])),\n \"p_ids\": json.dumps(app.get(\"p_ids\", [])),\n \"status\": 1\n }\n if bid in bids:\n alg[\"need_num\"] = app.get(\"need_num\", 0)\n dispatch_apps_logs.append(alg)\n if dispatch_apps_logs:\n DispatchAppLogs.insert_many(dispatch_apps_logs).execute()\n except Exception as e:\n logging.info(\n \"instalment_dispatch_app_month log error.cycle:%s,error:%s\" % (\n cycle, str(e)))\n\n\n# 每个月月底进行所有件重新分配\n@action(MessageAction.MONTH_DISPATCH_APP)\ndef month_dispatch_app(payload, msg_id):\n # 判断几天的日期是不是1号\n if datetime.today().day != 1:\n logging.info(\"今天不是1号,不能执行分期件\")\n return\n cycle_list = [Cycle.C1A.value,\n Cycle.C1B.value,\n Cycle.C2.value,\n Cycle.C3.value]\n with db.atomic():\n for cycle in cycle_list:\n apps = get_cycle_all_no_paid_app(cycle,\n ApplicationType.CASH_LOAN.value)\n if not apps:\n logging.info(\"month_dispatch_app not get apps.cycle:%s\"%cycle)\n continue\n app_logs, all_np_apps, all_p_apps = get_app_logs(apps)\n np_apps = month_dispatch_app_out_partner(cycle=cycle,\n apps=apps,\n app_logs=app_logs,\n np_apps = all_np_apps)\n if not np_apps and not all_p_apps:\n logging.info(\"month_dispatch_app not get inner apps.cycle:%s\",\n cycle)\n continue\n month_dispatch_app_inner(cycle,np_apps,app_logs,all_p_apps)\n # 分件日志记录在表中\n try:\n dispatch_apps_logs = []\n for bid,app in app_logs.items():\n alg = {\n \"bomber_id\": bid,\n \"need_num\": app.get(\"need_num\",0),\n \"form_ids\": json.dumps(app.get(\"form_ids\", [])),\n \"to_ids\": json.dumps(app.get(\"to_ids\", [])),\n 
\"np_ids\": json.dumps(app.get(\"np_ids\", [])),\n \"p_ids\": json.dumps(app.get(\"p_ids\", [])),\n \"status\": 1\n }\n dispatch_apps_logs.append(alg)\n for idx in range(0, len(dispatch_apps_logs), 10):\n DispatchAppLogs.insert_many(\n dispatch_apps_logs[idx:idx + 10]).execute()\n except Exception as e:\n logging.error(\n \"insert dispatch_log error:%s,cycle:%s\"%(str(e),cycle))\n try:\n instalment_month_dispatch_app()\n except Exception as e:\n logging.info(\"instalment_month_dispatch_error:%s\"%str(e))\n\n\n# 每天定时统计催收单信息\n@action(MessageAction.SUMMARY_BOMBER_OVERDUE)\ndef summary_bomber_overdue_everyday(payload, msg_id):\n cycle_list = Cycle.values()\n which_day = date.today()\n # 获取每个cycle没有完成的订单\n for cycle in cycle_list:\n apps = (ApplicationR.select(ApplicationR.id,\n ApplicationR.cycle,\n ApplicationR.ptp_bomber,\n ApplicationR.overdue_days,\n ApplicationR.promised_date,\n ApplicationR.follow_up_date,\n ApplicationR.external_id,\n OverdueBillR.status,\n OverdueBillR.periods,\n OverdueBillR.sub_bill_id)\n .join(OverdueBillR, JOIN_LEFT_OUTER,\n on = ApplicationR.id == OverdueBillR.collection_id)\n .where(ApplicationR.status != ApplicationStatus.REPAID.value,\n ApplicationR.no_active == 0,\n ApplicationR.cycle == cycle)\n .dicts())\n\n bomber_overdue_list = []\n for app in apps:\n status = app.get(\"status\")\n if status == ApplicationStatus.REPAID.value:\n continue\n ptp_bomber = app.get(\"ptp_bomber\")\n promised_date = app.get(\"promised_date\")\n follow_up_date = app.get(\"follow_up_date\")\n if not promised_date or promised_date.date() < date.today():\n ptp_bomber = promised_date = None\n if not follow_up_date or follow_up_date.date() < date.today():\n follow_up_date = None\n overdue_dict = {\n \"collection_id\": app.get(\"id\"),\n \"external_id\": app.get(\"external_id\"),\n \"sub_bill_id\": app.get(\"sub_bill_id\"),\n \"periods\": app.get(\"periods\"),\n \"cycle\": app.get(\"cycle\") if app.get(\"cycle\") else cycle,\n \"ptp_bomber\": ptp_bomber,\n \"promised_date\": promised_date,\n \"follow_up_date\": follow_up_date,\n \"which_day\": which_day,\n \"overdue_days\": app.get(\"overdue_days\")\n }\n bomber_overdue_list.append(overdue_dict)\n try:\n if bomber_overdue_list:\n with db.atomic():\n for index in range(0, len(bomber_overdue_list), 1000):\n insert_list = bomber_overdue_list[index: index+1000]\n BomberOverdue.insert_many(insert_list).execute()\n except Exception as e:\n logging.info(\n \"summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s\"%(\n cycle,str(which_day),str(e)))\n\n# 每分钟对员工的下p件个数做个统计\n@action(MessageAction.BOMBER_PTP_REAL_TIME_SUMMARY)\ndef bomber_ptp_real_time_summary(payload, msg_id):\n ptp_switch_number = 200\n sys_ptp_switch = (SystemConfig.select()\n .where(SystemConfig.key == 'PTP_SWITCH_NUMBER')\n .first())\n if sys_ptp_switch and sys_ptp_switch.value.isdigit():\n ptp_switch_number = int(sys_ptp_switch.value)\n today = datetime.today().date()\n ptp_apps = (ApplicationR.select(fn.COUNT(ApplicationR.id).alias('ptp_cnt'),\n ApplicationR.latest_bomber)\n .where(ApplicationR.status != ApplicationStatus.REPAID.value,\n ApplicationR.cycle < Cycle.C2.value,\n ApplicationR.promised_date >= today,\n ApplicationR.latest_bomber.is_null(False))\n .group_by(ApplicationR.latest_bomber))\n\n bomber_ptps = (BomberPtp.select(BomberPtp.bomber_id))\n bomber_ptp_bids = [b.bomber_id for b in bomber_ptps]\n insert_result = []\n for app in ptp_apps:\n ptp_switch = BomberCallSwitch.ON.value\n if app.ptp_cnt >= ptp_switch_number:\n ptp_switch = 
BomberCallSwitch.OFF.value\n params = {\"bomber_id\": app.latest_bomber_id,\n \"ptp_cnt\": app.ptp_cnt,\n \"ptp_switch\": ptp_switch,\n \"auto_ext\": app.latest_bomber.auto_ext}\n if app.latest_bomber_id in bomber_ptp_bids:\n try:\n q = (BomberPtp.update(**params)\n .where(BomberPtp.bomber_id==app.latest_bomber_id)\n .execute())\n except Exception as e:\n logging.error(\"ptp_reil_time_summary_error:%s,data,bid:%s\" % (\n str(e),params,app.latest_bomber_id))\n else:\n insert_result.append(params)\n if insert_result:\n BomberPtp.insert_many(insert_result).execute()\n\n# 每天的10:00,14:00,16:30不让接自动外呼,员工把自动外呼的件跟进完,才能接自动外呼\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)\ndef today_ptp_auto_call_switch(payload, msg_id):\n today = datetime.today().date()\n next_day = today + timedelta(days=1)\n # 获取有今天p到期的件的催收员\n apps = (ApplicationR.select(ApplicationR.latest_bomber)\n .where(ApplicationR.promised_date < next_day,\n ApplicationR.promised_date >= today,\n ApplicationR.promised_date.is_null(False),\n ApplicationR.status != ApplicationStatus.REPAID.value,\n ApplicationR.cycle < Cycle.C2.value,\n ApplicationR.latest_bomber.is_null(False))\n .group_by(ApplicationR.latest_bomber))\n bids = [a.latest_bomber_id for a in apps]\n if not bids:\n return\n q = (BomberPtp.update(today_switch=BomberCallSwitch.OFF.value)\n .where(BomberPtp.auto_ext.is_null(False),\n BomberPtp.bomber_id << bids)\n .execute())\n\n# 每天早上8点定时刷新催收员自动外呼的状态\n@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)\ndef update_today_switch_every_day(payload, msg_id):\n q = (BomberPtp.update(today_switch=BomberCallSwitch.ON.value)\n .where(BomberPtp.auto_ext.is_null(False))\n .execute())\n\n# 用户修改电话通知bomber\n@action(MessageAction.USER_UPDATE_PHONE)\ndef user_change_phone(payload, msg_id):\n user_id = payload.get(\"user_id\")\n new_mobile_no = payload.get(\"new_mobile_no\")\n if not all([user_id, new_mobile_no]):\n logging.info(\"用户修改电话,没有获取到用户id获这用户手机号\")\n return\n source = 'applicant updated number'\n contacts = (Contact.select()\n .where(Contact.user_id == int(user_id)))\n if not contacts.exists():\n logging.info(\"用户在contact中没有记录\")\n return\n new_contact = contacts.where(Contact.number == new_mobile_no,\n Contact.source == source)\n if new_contact.exists():\n logging.info(\"用户手机号已存在\")\n return\n contact = contacts.order_by(-Contact.created_at).first()\n Contact.create(user_id=contact.user_id,\n name=contact.name,\n number = new_mobile_no,\n source = source,\n relationship = Relationship.APPLICANT.value,\n real_relationship = Relationship.APPLICANT.value)\n\n",
"step-ids": [
60,
69,
74,
80,
146
]
}
|
[
60,
69,
74,
80,
146
] |
import random


def get_ticket():
    ticket = ''
    s = 'abcdefghijkrmnopqrstuvwxyz1234567890'
    for i in range(28):
        r_num = random.choice(s)
        ticket += r_num
    return ticket
|
normal
|
{
"blob_id": "d2a9a2fd3a1118c0855b8f77ce4c25cc6b4e8f87",
"index": 4328,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_ticket():\n ticket = ''\n s = 'abcdefghijkrmnopqrstuvwxyz1234567890'\n for i in range(28):\n r_num = random.choice(s)\n ticket += r_num\n return ticket\n",
"step-3": "import random\n\n\ndef get_ticket():\n ticket = ''\n s = 'abcdefghijkrmnopqrstuvwxyz1234567890'\n for i in range(28):\n r_num = random.choice(s)\n ticket += r_num\n return ticket\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import math
def hipotenusa(a,b):
    return math.sqrt((a*a)+(b*b))

def main():
    cateto1=input('dime un cateto')
    cateto2=input('dime el otro cateto')
    print ('la hipotenusa es: '),hipotenusa(cateto1,cateto2)

main()
|
normal
|
{
"blob_id": "50ae2b4c6d51451031fc31ebbc43c820da54d827",
"index": 7898,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n cateto1 = input('dime un cateto')\n cateto2 = input('dime el otro cateto')\n print('la hipotenusa es: '), hipotenusa(cateto1, cateto2)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef hipotenusa(a, b):\n return math.sqrt(a * a + b * b)\n\n\ndef main():\n cateto1 = input('dime un cateto')\n cateto2 = input('dime el otro cateto')\n print('la hipotenusa es: '), hipotenusa(cateto1, cateto2)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef hipotenusa(a, b):\n return math.sqrt(a * a + b * b)\n\n\ndef main():\n cateto1 = input('dime un cateto')\n cateto2 = input('dime el otro cateto')\n print('la hipotenusa es: '), hipotenusa(cateto1, cateto2)\n\n\nmain()\n",
"step-5": "import math\r\ndef hipotenusa(a,b):\r\n return math.sqrt((a*a)+(b*b))\r\n\r\ndef main():\r\n cateto1=input('dime un cateto')\r\n cateto2=input('dime el otro cateto')\r\n print ('la hipotenusa es: '),hipotenusa(cateto1,cateto2)\r\n\r\nmain()\r\n",
"step-ids": [
0,
1,
2,
3,
5
]
}
|
[
0,
1,
2,
3,
5
] |
import time
import DHT22
import pigpio
import Sensor

class MagicBoxDHT22(object):

    def DHT22(self):
        self.s.trigger()
        time.sleep(0.2)
        self.tempF=round(self.s.temperature()*1.8+32,2) -3.7 #+adjustment
        self.humidity=round(self.s.humidity())

    def __init__(self):
        self.pi=pigpio.pi()
        self.s=DHT22.sensor(self.pi, 4)
        self.tempF=0
        self.humidity=0
|
normal
|
{
"blob_id": "179b07870d656fb24b73d8b0a1f76ffed08aa5c2",
"index": 9665,
"step-1": "<mask token>\n\n\nclass MagicBoxDHT22(object):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass MagicBoxDHT22(object):\n <mask token>\n\n def __init__(self):\n self.pi = pigpio.pi()\n self.s = DHT22.sensor(self.pi, 4)\n self.tempF = 0\n self.humidity = 0\n",
"step-3": "<mask token>\n\n\nclass MagicBoxDHT22(object):\n\n def DHT22(self):\n self.s.trigger()\n time.sleep(0.2)\n self.tempF = round(self.s.temperature() * 1.8 + 32, 2) - 3.7\n self.humidity = round(self.s.humidity())\n\n def __init__(self):\n self.pi = pigpio.pi()\n self.s = DHT22.sensor(self.pi, 4)\n self.tempF = 0\n self.humidity = 0\n",
"step-4": "import time\nimport DHT22\nimport pigpio\nimport Sensor\n\n\nclass MagicBoxDHT22(object):\n\n def DHT22(self):\n self.s.trigger()\n time.sleep(0.2)\n self.tempF = round(self.s.temperature() * 1.8 + 32, 2) - 3.7\n self.humidity = round(self.s.humidity())\n\n def __init__(self):\n self.pi = pigpio.pi()\n self.s = DHT22.sensor(self.pi, 4)\n self.tempF = 0\n self.humidity = 0\n",
"step-5": "import time\nimport DHT22\nimport pigpio\nimport Sensor\n\nclass MagicBoxDHT22(object):\n\n def DHT22(self):\n self.s.trigger()\n time.sleep(0.2)\n self.tempF=round(self.s.temperature()*1.8+32,2) -3.7 #+adjustment\n self.humidity=round(self.s.humidity())\n\n def __init__(self):\n self.pi=pigpio.pi()\n self.s=DHT22.sensor(self.pi, 4)\n self.tempF=0\n self.humidity=0\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for k, v in EXTRAS_REQUIRES.iteritems():
    if k == 'test' or k == 'dev':
        continue
    EXTRAS_REQUIRES['test'] += v
<|reserved_special_token_0|>
with open(path) as fp:
    long_description = fp.read()
setup(name='linkins', version='0.0.7.4', description=
    'Links a directory structure and optionally executes user-defined scripts at each level of the directory hierarchy'
    , long_description=long_description, author='Andres Buritica',
    author_email='[email protected]', maintainer='Andres Buritica',
    maintainer_email='[email protected]', url=
    'https://github.com/thelinuxkid/linkins', license='MIT', packages=
    find_packages(), test_suite='nose.collector', install_requires=[
    'setuptools'], extras_require=EXTRAS_REQUIRES, entry_points={
    'console_scripts': ['linkins = linkins.cli:main']}, classifiers=[
    'Development Status :: 4 - Beta', 'Intended Audience :: Developers',
    'Natural Language :: English', 'License :: OSI Approved :: MIT License',
    'Programming Language :: Python', 'Programming Language :: Python :: 2.7'])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
EXTRAS_REQUIRES = dict(test=['pytest>=2.2.4', 'mock>=0.8.0',
    'tempdirs>=0.0.8'], dev=['ipython>=0.13'])
for k, v in EXTRAS_REQUIRES.iteritems():
    if k == 'test' or k == 'dev':
        continue
    EXTRAS_REQUIRES['test'] += v
root = os.path.dirname(__file__)
path = os.path.join(root, 'README.rst')
with open(path) as fp:
    long_description = fp.read()
setup(name='linkins', version='0.0.7.4', description=
    'Links a directory structure and optionally executes user-defined scripts at each level of the directory hierarchy'
    , long_description=long_description, author='Andres Buritica',
    author_email='[email protected]', maintainer='Andres Buritica',
    maintainer_email='[email protected]', url=
    'https://github.com/thelinuxkid/linkins', license='MIT', packages=
    find_packages(), test_suite='nose.collector', install_requires=[
    'setuptools'], extras_require=EXTRAS_REQUIRES, entry_points={
    'console_scripts': ['linkins = linkins.cli:main']}, classifiers=[
    'Development Status :: 4 - Beta', 'Intended Audience :: Developers',
    'Natural Language :: English', 'License :: OSI Approved :: MIT License',
    'Programming Language :: Python', 'Programming Language :: Python :: 2.7'])
<|reserved_special_token_1|>
from setuptools import setup, find_packages
import os
EXTRAS_REQUIRES = dict(test=['pytest>=2.2.4', 'mock>=0.8.0',
    'tempdirs>=0.0.8'], dev=['ipython>=0.13'])
for k, v in EXTRAS_REQUIRES.iteritems():
    if k == 'test' or k == 'dev':
        continue
    EXTRAS_REQUIRES['test'] += v
root = os.path.dirname(__file__)
path = os.path.join(root, 'README.rst')
with open(path) as fp:
    long_description = fp.read()
setup(name='linkins', version='0.0.7.4', description=
    'Links a directory structure and optionally executes user-defined scripts at each level of the directory hierarchy'
    , long_description=long_description, author='Andres Buritica',
    author_email='[email protected]', maintainer='Andres Buritica',
    maintainer_email='[email protected]', url=
    'https://github.com/thelinuxkid/linkins', license='MIT', packages=
    find_packages(), test_suite='nose.collector', install_requires=[
    'setuptools'], extras_require=EXTRAS_REQUIRES, entry_points={
    'console_scripts': ['linkins = linkins.cli:main']}, classifiers=[
    'Development Status :: 4 - Beta', 'Intended Audience :: Developers',
    'Natural Language :: English', 'License :: OSI Approved :: MIT License',
    'Programming Language :: Python', 'Programming Language :: Python :: 2.7'])
<|reserved_special_token_1|>
#!/usr/bin/python
from setuptools import setup, find_packages
import os

EXTRAS_REQUIRES = dict(
    test=[
        'pytest>=2.2.4',
        'mock>=0.8.0',
        'tempdirs>=0.0.8',
    ],
    dev=[
        'ipython>=0.13',
    ],
    )

# Tests always depend on all other requirements, except dev
for k,v in EXTRAS_REQUIRES.iteritems():
    if k == 'test' or k == 'dev':
        continue
    EXTRAS_REQUIRES['test'] += v

# Pypi package documentation
root = os.path.dirname(__file__)
path = os.path.join(root, 'README.rst')
with open(path) as fp:
    long_description = fp.read()

setup(
    name='linkins',
    version='0.0.7.4',
    description=(
        'Links a directory structure and optionally executes '
        'user-defined scripts at each level of the directory '
        'hierarchy'
    ),
    long_description=long_description,
    author='Andres Buritica',
    author_email='[email protected]',
    maintainer='Andres Buritica',
    maintainer_email='[email protected]',
    url='https://github.com/thelinuxkid/linkins',
    license='MIT',
    packages = find_packages(),
    test_suite='nose.collector',
    install_requires=[
        'setuptools',
    ],
    extras_require=EXTRAS_REQUIRES,
    entry_points={
        'console_scripts': [
            'linkins = linkins.cli:main',
        ],
    },
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7'
    ],
)
|
flexible
|
{
"blob_id": "f531af47431055866db72f6a7181580da461853d",
"index": 6780,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor k, v in EXTRAS_REQUIRES.iteritems():\n if k == 'test' or k == 'dev':\n continue\n EXTRAS_REQUIRES['test'] += v\n<mask token>\nwith open(path) as fp:\n long_description = fp.read()\nsetup(name='linkins', version='0.0.7.4', description=\n 'Links a directory structure and optionally executes user-defined scripts at each level of the directory hierarchy'\n , long_description=long_description, author='Andres Buritica',\n author_email='[email protected]', maintainer='Andres Buritica',\n maintainer_email='[email protected]', url=\n 'https://github.com/thelinuxkid/linkins', license='MIT', packages=\n find_packages(), test_suite='nose.collector', install_requires=[\n 'setuptools'], extras_require=EXTRAS_REQUIRES, entry_points={\n 'console_scripts': ['linkins = linkins.cli:main']}, classifiers=[\n 'Development Status :: 4 - Beta', 'Intended Audience :: Developers',\n 'Natural Language :: English', 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python', 'Programming Language :: Python :: 2.7'])\n",
"step-3": "<mask token>\nEXTRAS_REQUIRES = dict(test=['pytest>=2.2.4', 'mock>=0.8.0',\n 'tempdirs>=0.0.8'], dev=['ipython>=0.13'])\nfor k, v in EXTRAS_REQUIRES.iteritems():\n if k == 'test' or k == 'dev':\n continue\n EXTRAS_REQUIRES['test'] += v\nroot = os.path.dirname(__file__)\npath = os.path.join(root, 'README.rst')\nwith open(path) as fp:\n long_description = fp.read()\nsetup(name='linkins', version='0.0.7.4', description=\n 'Links a directory structure and optionally executes user-defined scripts at each level of the directory hierarchy'\n , long_description=long_description, author='Andres Buritica',\n author_email='[email protected]', maintainer='Andres Buritica',\n maintainer_email='[email protected]', url=\n 'https://github.com/thelinuxkid/linkins', license='MIT', packages=\n find_packages(), test_suite='nose.collector', install_requires=[\n 'setuptools'], extras_require=EXTRAS_REQUIRES, entry_points={\n 'console_scripts': ['linkins = linkins.cli:main']}, classifiers=[\n 'Development Status :: 4 - Beta', 'Intended Audience :: Developers',\n 'Natural Language :: English', 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python', 'Programming Language :: Python :: 2.7'])\n",
"step-4": "from setuptools import setup, find_packages\nimport os\nEXTRAS_REQUIRES = dict(test=['pytest>=2.2.4', 'mock>=0.8.0',\n 'tempdirs>=0.0.8'], dev=['ipython>=0.13'])\nfor k, v in EXTRAS_REQUIRES.iteritems():\n if k == 'test' or k == 'dev':\n continue\n EXTRAS_REQUIRES['test'] += v\nroot = os.path.dirname(__file__)\npath = os.path.join(root, 'README.rst')\nwith open(path) as fp:\n long_description = fp.read()\nsetup(name='linkins', version='0.0.7.4', description=\n 'Links a directory structure and optionally executes user-defined scripts at each level of the directory hierarchy'\n , long_description=long_description, author='Andres Buritica',\n author_email='[email protected]', maintainer='Andres Buritica',\n maintainer_email='[email protected]', url=\n 'https://github.com/thelinuxkid/linkins', license='MIT', packages=\n find_packages(), test_suite='nose.collector', install_requires=[\n 'setuptools'], extras_require=EXTRAS_REQUIRES, entry_points={\n 'console_scripts': ['linkins = linkins.cli:main']}, classifiers=[\n 'Development Status :: 4 - Beta', 'Intended Audience :: Developers',\n 'Natural Language :: English', 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python', 'Programming Language :: Python :: 2.7'])\n",
"step-5": "#!/usr/bin/python\nfrom setuptools import setup, find_packages\nimport os\n\nEXTRAS_REQUIRES = dict(\n test=[\n 'pytest>=2.2.4',\n 'mock>=0.8.0',\n 'tempdirs>=0.0.8',\n ],\n dev=[\n 'ipython>=0.13',\n ],\n )\n\n# Tests always depend on all other requirements, except dev\nfor k,v in EXTRAS_REQUIRES.iteritems():\n if k == 'test' or k == 'dev':\n continue\n EXTRAS_REQUIRES['test'] += v\n\n# Pypi package documentation\nroot = os.path.dirname(__file__)\npath = os.path.join(root, 'README.rst')\nwith open(path) as fp:\n long_description = fp.read()\n\nsetup(\n name='linkins',\n version='0.0.7.4',\n description=(\n 'Links a directory structure and optionally executes '\n 'user-defined scripts at each level of the directory '\n 'hierarchy'\n ),\n long_description=long_description,\n author='Andres Buritica',\n author_email='[email protected]',\n maintainer='Andres Buritica',\n maintainer_email='[email protected]',\n url='https://github.com/thelinuxkid/linkins',\n license='MIT',\n packages = find_packages(),\n test_suite='nose.collector',\n install_requires=[\n 'setuptools',\n ],\n extras_require=EXTRAS_REQUIRES,\n entry_points={\n 'console_scripts': [\n 'linkins = linkins.cli:main',\n ],\n },\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Natural Language :: English',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2.7'\n ],\n)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import sys

from dkfileutils.path import Path


def line_endings(fname):
    """Return all line endings in the file.
    """
    _endings = {line[-2:] for line in open(fname, 'rb').readlines()}
    res = set()
    for e in _endings:
        if e.endswith(b'\r'):
            res.add(b'\r')
        elif e.endswith(b'\r\n'):
            res.add(b'\r\n')
        elif e.endswith(b'\n'):
            res.add(b'\n')
    return res


def chomp(s):
    """Remove line terminator if it exists.
    """
    if s[-2:] == b'\r\n':
        return s[:-2]
    if s[-1:] == b'\r' or s[-1:] == b'\n':
        return s[:-1]
    return s


def fix_line_endings(fname, eol=b'\n'):
    """Change all line endings to ``eol``.
    """
    lines = [chomp(line) for line in open(fname, 'rb').readlines()]
    with open(fname, 'wb') as fp:
        for line in lines:
            fp.write(line + eol)


def copy(ctx, source, dest, force=False):
    """Copy ``source`` to ``dest``, which can be a file or directory.
    """
    # print "COPY:", locals()
    # print "COPY:", ctx.force, ctx.verbose
    if source == dest:
        return dest

    source = os.path.normcase(os.path.normpath(str(source)))
    dest = os.path.normcase(os.path.normpath(str(dest)))
    flags = ""
    if sys.platform == 'win32':
        if force:
            flags += " /Y"
        # print 'copy {flags} {source} {dest}'.format(**locals())
        ctx.run('copy {flags} {source} {dest}'.format(**locals()))
    else: # pragma: nocover
        if force:
            flags += " --force"
        ctx.run('cp {flags} {source} {dest}'.format(**locals()))
    return dest


def concat(ctx, dest, *sources, **kw):
    force = kw.pop('force', False) # noqa
    placement = Path(dest).dirname()
    placement.makedirs()

    with open(dest, 'w') as out:
        print("Opened:", dest, "for writing.")
        for s in sources:
            with open(s, 'r') as inp:
                print("  appending:", s)
                out.writelines(inp.readlines())
        out.write('\n')

    # flags = ""
    # if sys.platform == 'win32':
    #     if force:
    #         flags += " /Y"
    #     source = '+'.join(sources)
    #     source = source.replace('/', '\\')
    #     ctx.run('copy {flags} {source} {dest}'.format(**locals()))
    # else: # pragma: nocover
    #     if force:
    #         pass
    #     # flags += " --force"
    #     source = ' '.join(sources)
    #     # print 'cat {flags} {source} > {dest}'.format(**locals())
    #     ctx.run('cat {flags} {source} > {dest}'.format(**locals()))

    fix_line_endings(dest)
    # if len(line_endings(dest)) > 1:
    #     fix_line_endings(dest)

    return dest
|
normal
|
{
"blob_id": "be279fe44b0d52c9d473e08d8b9c28d5b6386b45",
"index": 5184,
"step-1": "<mask token>\n\n\ndef line_endings(fname):\n \"\"\"Return all line endings in the file.\n \"\"\"\n _endings = {line[-2:] for line in open(fname, 'rb').readlines()}\n res = set()\n for e in _endings:\n if e.endswith(b'\\r'):\n res.add(b'\\r')\n elif e.endswith(b'\\r\\n'):\n res.add(b'\\r\\n')\n elif e.endswith(b'\\n'):\n res.add(b'\\n')\n return res\n\n\n<mask token>\n\n\ndef fix_line_endings(fname, eol=b'\\n'):\n \"\"\"Change all line endings to ``eol``.\n \"\"\"\n lines = [chomp(line) for line in open(fname, 'rb').readlines()]\n with open(fname, 'wb') as fp:\n for line in lines:\n fp.write(line + eol)\n\n\n<mask token>\n\n\ndef concat(ctx, dest, *sources, **kw):\n force = kw.pop('force', False)\n placement = Path(dest).dirname()\n placement.makedirs()\n with open(dest, 'w') as out:\n print('Opened:', dest, 'for writing.')\n for s in sources:\n with open(s, 'r') as inp:\n print(' appending:', s)\n out.writelines(inp.readlines())\n out.write('\\n')\n fix_line_endings(dest)\n return dest\n",
"step-2": "<mask token>\n\n\ndef line_endings(fname):\n \"\"\"Return all line endings in the file.\n \"\"\"\n _endings = {line[-2:] for line in open(fname, 'rb').readlines()}\n res = set()\n for e in _endings:\n if e.endswith(b'\\r'):\n res.add(b'\\r')\n elif e.endswith(b'\\r\\n'):\n res.add(b'\\r\\n')\n elif e.endswith(b'\\n'):\n res.add(b'\\n')\n return res\n\n\ndef chomp(s):\n \"\"\"Remove line terminator if it exists.\n \"\"\"\n if s[-2:] == b'\\r\\n':\n return s[:-2]\n if s[-1:] == b'\\r' or s[-1:] == b'\\n':\n return s[:-1]\n return s\n\n\ndef fix_line_endings(fname, eol=b'\\n'):\n \"\"\"Change all line endings to ``eol``.\n \"\"\"\n lines = [chomp(line) for line in open(fname, 'rb').readlines()]\n with open(fname, 'wb') as fp:\n for line in lines:\n fp.write(line + eol)\n\n\n<mask token>\n\n\ndef concat(ctx, dest, *sources, **kw):\n force = kw.pop('force', False)\n placement = Path(dest).dirname()\n placement.makedirs()\n with open(dest, 'w') as out:\n print('Opened:', dest, 'for writing.')\n for s in sources:\n with open(s, 'r') as inp:\n print(' appending:', s)\n out.writelines(inp.readlines())\n out.write('\\n')\n fix_line_endings(dest)\n return dest\n",
"step-3": "<mask token>\n\n\ndef line_endings(fname):\n \"\"\"Return all line endings in the file.\n \"\"\"\n _endings = {line[-2:] for line in open(fname, 'rb').readlines()}\n res = set()\n for e in _endings:\n if e.endswith(b'\\r'):\n res.add(b'\\r')\n elif e.endswith(b'\\r\\n'):\n res.add(b'\\r\\n')\n elif e.endswith(b'\\n'):\n res.add(b'\\n')\n return res\n\n\ndef chomp(s):\n \"\"\"Remove line terminator if it exists.\n \"\"\"\n if s[-2:] == b'\\r\\n':\n return s[:-2]\n if s[-1:] == b'\\r' or s[-1:] == b'\\n':\n return s[:-1]\n return s\n\n\ndef fix_line_endings(fname, eol=b'\\n'):\n \"\"\"Change all line endings to ``eol``.\n \"\"\"\n lines = [chomp(line) for line in open(fname, 'rb').readlines()]\n with open(fname, 'wb') as fp:\n for line in lines:\n fp.write(line + eol)\n\n\ndef copy(ctx, source, dest, force=False):\n \"\"\"Copy ``source`` to ``dest``, which can be a file or directory.\n \"\"\"\n if source == dest:\n return dest\n source = os.path.normcase(os.path.normpath(str(source)))\n dest = os.path.normcase(os.path.normpath(str(dest)))\n flags = ''\n if sys.platform == 'win32':\n if force:\n flags += ' /Y'\n ctx.run('copy {flags} {source} {dest}'.format(**locals()))\n else:\n if force:\n flags += ' --force'\n ctx.run('cp {flags} {source} {dest}'.format(**locals()))\n return dest\n\n\ndef concat(ctx, dest, *sources, **kw):\n force = kw.pop('force', False)\n placement = Path(dest).dirname()\n placement.makedirs()\n with open(dest, 'w') as out:\n print('Opened:', dest, 'for writing.')\n for s in sources:\n with open(s, 'r') as inp:\n print(' appending:', s)\n out.writelines(inp.readlines())\n out.write('\\n')\n fix_line_endings(dest)\n return dest\n",
"step-4": "from __future__ import print_function\nimport os\nimport sys\nfrom dkfileutils.path import Path\n\n\ndef line_endings(fname):\n \"\"\"Return all line endings in the file.\n \"\"\"\n _endings = {line[-2:] for line in open(fname, 'rb').readlines()}\n res = set()\n for e in _endings:\n if e.endswith(b'\\r'):\n res.add(b'\\r')\n elif e.endswith(b'\\r\\n'):\n res.add(b'\\r\\n')\n elif e.endswith(b'\\n'):\n res.add(b'\\n')\n return res\n\n\ndef chomp(s):\n \"\"\"Remove line terminator if it exists.\n \"\"\"\n if s[-2:] == b'\\r\\n':\n return s[:-2]\n if s[-1:] == b'\\r' or s[-1:] == b'\\n':\n return s[:-1]\n return s\n\n\ndef fix_line_endings(fname, eol=b'\\n'):\n \"\"\"Change all line endings to ``eol``.\n \"\"\"\n lines = [chomp(line) for line in open(fname, 'rb').readlines()]\n with open(fname, 'wb') as fp:\n for line in lines:\n fp.write(line + eol)\n\n\ndef copy(ctx, source, dest, force=False):\n \"\"\"Copy ``source`` to ``dest``, which can be a file or directory.\n \"\"\"\n if source == dest:\n return dest\n source = os.path.normcase(os.path.normpath(str(source)))\n dest = os.path.normcase(os.path.normpath(str(dest)))\n flags = ''\n if sys.platform == 'win32':\n if force:\n flags += ' /Y'\n ctx.run('copy {flags} {source} {dest}'.format(**locals()))\n else:\n if force:\n flags += ' --force'\n ctx.run('cp {flags} {source} {dest}'.format(**locals()))\n return dest\n\n\ndef concat(ctx, dest, *sources, **kw):\n force = kw.pop('force', False)\n placement = Path(dest).dirname()\n placement.makedirs()\n with open(dest, 'w') as out:\n print('Opened:', dest, 'for writing.')\n for s in sources:\n with open(s, 'r') as inp:\n print(' appending:', s)\n out.writelines(inp.readlines())\n out.write('\\n')\n fix_line_endings(dest)\n return dest\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import print_function\nimport os\nimport sys\n\nfrom dkfileutils.path import Path\n\n\ndef line_endings(fname):\n \"\"\"Return all line endings in the file.\n \"\"\"\n _endings = {line[-2:] for line in open(fname, 'rb').readlines()}\n res = set()\n for e in _endings:\n if e.endswith(b'\\r'):\n res.add(b'\\r')\n elif e.endswith(b'\\r\\n'):\n res.add(b'\\r\\n')\n elif e.endswith(b'\\n'):\n res.add(b'\\n')\n return res\n\n\ndef chomp(s):\n \"\"\"Remove line terminator if it exists.\n \"\"\"\n if s[-2:] == b'\\r\\n':\n return s[:-2]\n if s[-1:] == b'\\r' or s[-1:] == b'\\n':\n return s[:-1]\n return s\n\n\ndef fix_line_endings(fname, eol=b'\\n'):\n \"\"\"Change all line endings to ``eol``.\n \"\"\"\n lines = [chomp(line) for line in open(fname, 'rb').readlines()]\n with open(fname, 'wb') as fp:\n for line in lines:\n fp.write(line + eol)\n\n\ndef copy(ctx, source, dest, force=False):\n \"\"\"Copy ``source`` to ``dest``, which can be a file or directory.\n \"\"\"\n # print \"COPY:\", locals()\n # print \"COPY:\", ctx.force, ctx.verbose\n if source == dest:\n return dest\n\n source = os.path.normcase(os.path.normpath(str(source)))\n dest = os.path.normcase(os.path.normpath(str(dest)))\n flags = \"\"\n if sys.platform == 'win32':\n if force:\n flags += \" /Y\"\n # print 'copy {flags} {source} {dest}'.format(**locals())\n ctx.run('copy {flags} {source} {dest}'.format(**locals()))\n else: # pragma: nocover\n if force:\n flags += \" --force\"\n ctx.run('cp {flags} {source} {dest}'.format(**locals()))\n return dest\n\n\ndef concat(ctx, dest, *sources, **kw):\n force = kw.pop('force', False) # noqa\n placement = Path(dest).dirname()\n placement.makedirs()\n\n with open(dest, 'w') as out:\n print(\"Opened:\", dest, \"for writing.\")\n for s in sources:\n with open(s, 'r') as inp:\n print(\" appending:\", s)\n out.writelines(inp.readlines())\n out.write('\\n')\n\n # flags = \"\"\n # if sys.platform == 'win32':\n # if force:\n # flags += \" /Y\"\n # source = '+'.join(sources)\n # source = source.replace('/', '\\\\')\n # ctx.run('copy {flags} {source} {dest}'.format(**locals()))\n # else: # pragma: nocover\n # if force:\n # pass\n # # flags += \" --force\"\n # source = ' '.join(sources)\n # # print 'cat {flags} {source} > {dest}'.format(**locals())\n # ctx.run('cat {flags} {source} > {dest}'.format(**locals()))\n\n fix_line_endings(dest)\n # if len(line_endings(dest)) > 1:\n # fix_line_endings(dest)\n\n return dest\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 27 10:34:15 2021

@author: Ivan
Course material: From Marketer to Web-Scraping Pro (hands-on) | 5 major social platforms + 2 major e-commerce sites
Copyright belongs to "楊超霆"; for questions, contact [email protected]

Chapter 1: Basic crawler training
HTML crawler POST tutorial - Taiwan stock market info site (goodinfo.tw)
"""
import requests
from bs4 import BeautifulSoup

# URL to scrape
url = 'https://goodinfo.tw/StockInfo/StockDividendPolicy.asp?STOCK_ID=2002'
# the request must carry these headers, or the site rejects it
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36'}
# request the site
list_req = requests.post(url, headers=headers)
# parse the full page source
soup = BeautifulSoup(list_req.content, "html.parser")
# extract the desired data and show it (the original bare expression discarded the value when run as a script)
print(soup.find('td', {'style': 'color:red'}).text)
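# A minimal hardening sketch, not from the original course material: the same
# request with an HTTP status check and a guard against a missing cell, in case
# goodinfo.tw returns an error page. Variable names here are illustrative only.
resp = requests.post(url, headers=headers)
resp.raise_for_status()  # fail loudly on 4xx/5xx responses
soup_checked = BeautifulSoup(resp.content, 'html.parser')
cell = soup_checked.find('td', {'style': 'color:red'})
print(cell.text if cell is not None else 'target cell not found')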
|
normal
|
{
"blob_id": "a5918679b6e3a9bde54808264d9526c6a191578f",
"index": 7737,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsoup.find('td', {'style': 'color:red'}).text\n",
"step-3": "<mask token>\nurl = 'https://goodinfo.tw/StockInfo/StockDividendPolicy.asp?STOCK_ID=2002'\nheaders = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36'\n }\nlist_req = requests.post(url, headers=headers)\nsoup = BeautifulSoup(list_req.content, 'html.parser')\nsoup.find('td', {'style': 'color:red'}).text\n",
"step-4": "<mask token>\nimport requests\nfrom bs4 import BeautifulSoup\nurl = 'https://goodinfo.tw/StockInfo/StockDividendPolicy.asp?STOCK_ID=2002'\nheaders = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36'\n }\nlist_req = requests.post(url, headers=headers)\nsoup = BeautifulSoup(list_req.content, 'html.parser')\nsoup.find('td', {'style': 'color:red'}).text\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Tue Apr 27 10:34:15 2021\r\n\r\n@author: Ivan\r\n課程教材:行銷人轉職爬蟲王實戰|5大社群平台+2大電商\r\n版權屬於「楊超霆」所有,若有疑問,可聯絡[email protected]\r\n\r\n第一章 爬蟲基本訓練\r\nHtml爬蟲Post教學-台灣股市資訊網\r\n\"\"\"\r\nimport requests\r\nfrom bs4 import BeautifulSoup\r\n\r\n# 要抓取的網址\r\nurl = 'https://goodinfo.tw/StockInfo/StockDividendPolicy.asp?STOCK_ID=2002'\r\n# 附帶的資料必須要有\r\nheaders = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36' }\r\n\r\n#請求網站\r\nlist_req = requests.post(url, headers=headers)\r\n#將整個網站的程式碼爬下來\r\nsoup = BeautifulSoup(list_req.content, \"html.parser\")\r\n#抓取想要的資料\r\nsoup.find('td',{'style':'color:red'}).text\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def send_email(mconn, mailto, mailfrom, mailsub, msgbody):
msg = MIMEText(msgbody)
msg['Subject'] = mailsub
msg['To'] = mailto
msg['From'] = mailfrom
mconn.sendmail(mailfrom, mailto, msg.as_string())
mconn.quit()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def init_mail(server, user, pwd, port=25):
server = smtplib.SMTP(server, port)
server.starttls()
server.login(user, pwd)
return server
def send_email(mconn, mailto, mailfrom, mailsub, msgbody):
msg = MIMEText(msgbody)
msg['Subject'] = mailsub
msg['To'] = mailto
msg['From'] = mailfrom
mconn.sendmail(mailfrom, mailto, msg.as_string())
mconn.quit()
<|reserved_special_token_1|>
from email.mime.text import MIMEText
import smtplib
def init_mail(server, user, pwd, port=25):
server = smtplib.SMTP(server, port)
server.starttls()
server.login(user, pwd)
return server
def send_email(mconn, mailto, mailfrom, mailsub, msgbody):
msg = MIMEText(msgbody)
msg['Subject'] = mailsub
msg['To'] = mailto
msg['From'] = mailfrom
mconn.sendmail(mailfrom, mailto, msg.as_string())
mconn.quit()
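# A hypothetical usage sketch (server, credentials, and addresses are
# placeholders, not from the original snippet); port 587 is the usual
# STARTTLS submission port. Note send_email closes the connection when done.
if __name__ == '__main__':
    conn = init_mail('smtp.example.com', 'user@example.com', 'secret', port=587)
    send_email(conn, 'to@example.com', 'user@example.com', 'Test subject', 'Test body')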
|
flexible
|
{
"blob_id": "ec604aea28dfb2909ac9e4b0f15e6b5bbe1c3446",
"index": 2934,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef send_email(mconn, mailto, mailfrom, mailsub, msgbody):\n msg = MIMEText(msgbody)\n msg['Subject'] = mailsub\n msg['To'] = mailto\n msg['From'] = mailfrom\n mconn.sendmail(mailfrom, mailto, msg.as_string())\n mconn.quit()\n",
"step-3": "<mask token>\n\n\ndef init_mail(server, user, pwd, port=25):\n server = smtplib.SMTP(server, port)\n server.starttls()\n server.login(user, pwd)\n return server\n\n\ndef send_email(mconn, mailto, mailfrom, mailsub, msgbody):\n msg = MIMEText(msgbody)\n msg['Subject'] = mailsub\n msg['To'] = mailto\n msg['From'] = mailfrom\n mconn.sendmail(mailfrom, mailto, msg.as_string())\n mconn.quit()\n",
"step-4": "from email.mime.text import MIMEText\nimport smtplib\n\n\ndef init_mail(server, user, pwd, port=25):\n server = smtplib.SMTP(server, port)\n server.starttls()\n server.login(user, pwd)\n return server\n\n\ndef send_email(mconn, mailto, mailfrom, mailsub, msgbody):\n msg = MIMEText(msgbody)\n msg['Subject'] = mailsub\n msg['To'] = mailto\n msg['From'] = mailfrom\n mconn.sendmail(mailfrom, mailto, msg.as_string())\n mconn.quit()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
class User:
    account = []

    def __init__(self, balance, int_rate):
        self.balance = balance
        self.int_rate = int_rate
        User.account.append(self)

    def dep(self, amount):
        self.balance += amount
        return self

    def make_withdrawal(self, amount):
        if (self.balance - amount) >= 0:
            self.balance -= amount
        else:
            print("Insufficient funds: Charging a $5 fee")
            self.balance -= 5
        return self

    def display_account_info(self):
        print(self.balance)  # alternatively: print(f"Balance: {self.balance}")
        return self

    def yield_interest(self):
        # multiplying the balance by the decimal rate gives the interest to add
        self.balance += self.balance * self.int_rate
        return self

    @classmethod
    def we_call_cls(cls):
        for account in cls.account:
            account.display_account_info()


class Jedi:
    def __init__(self, name):
        self.name = name  # the instance's name is whatever was passed in
        self.account = {
            "Grey": User(5000, .3),
            "light": User(300, .33)
        }


prey = Jedi('prey')
print(prey.name)

# Jedi defines no we_call_cls method, so the classmethod must be called on
# User directly (the original prey.we_call_cls() raised AttributeError).
User.we_call_cls()
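# A brief extra demo, not in the original: dep, make_withdrawal and
# yield_interest each return self, so calls chain fluently. Values are
# illustrative only.
demo = User(1000, .05)
demo.dep(200).make_withdrawal(150).yield_interest().display_account_info()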
|
normal
|
{
"blob_id": "ff3f6d50498f58f3a340e2d690165efcc1a5fb1d",
"index": 6000,
"step-1": "class User:\n <mask token>\n\n def __init__(self, balance, int_rate):\n self.balance = balance\n self.int_rate = int_rate\n User.account.append(self)\n\n def dep(self, amount):\n self.balance += amount\n return self\n\n def make_withdrawal(self, amount):\n if self.balance - amount >= 0:\n self.balance -= amount\n else:\n print('Insufficient funds:Charging a $5 fee')\n self.balance -= 5\n return self\n\n def display_account_info(self):\n print(self.balance)\n return self\n <mask token>\n\n @classmethod\n def we_call_cls(cls):\n for account in cls.account:\n account.display_account_info()\n\n\nclass Jedi:\n\n def __init__(self, name):\n self.name = name\n self.account = {'Grey': User(5000, 0.3), 'light': User(300, 0.33)}\n\n\n<mask token>\n",
"step-2": "class User:\n account = []\n\n def __init__(self, balance, int_rate):\n self.balance = balance\n self.int_rate = int_rate\n User.account.append(self)\n\n def dep(self, amount):\n self.balance += amount\n return self\n\n def make_withdrawal(self, amount):\n if self.balance - amount >= 0:\n self.balance -= amount\n else:\n print('Insufficient funds:Charging a $5 fee')\n self.balance -= 5\n return self\n\n def display_account_info(self):\n print(self.balance)\n return self\n\n def yield_interest(self):\n self.balance = self.balance + self.balance * self.int_rate\n return self\n\n @classmethod\n def we_call_cls(cls):\n for account in cls.account:\n account.display_account_info()\n\n\nclass Jedi:\n\n def __init__(self, name):\n self.name = name\n self.account = {'Grey': User(5000, 0.3), 'light': User(300, 0.33)}\n\n\n<mask token>\n",
"step-3": "class User:\n account = []\n\n def __init__(self, balance, int_rate):\n self.balance = balance\n self.int_rate = int_rate\n User.account.append(self)\n\n def dep(self, amount):\n self.balance += amount\n return self\n\n def make_withdrawal(self, amount):\n if self.balance - amount >= 0:\n self.balance -= amount\n else:\n print('Insufficient funds:Charging a $5 fee')\n self.balance -= 5\n return self\n\n def display_account_info(self):\n print(self.balance)\n return self\n\n def yield_interest(self):\n self.balance = self.balance + self.balance * self.int_rate\n return self\n\n @classmethod\n def we_call_cls(cls):\n for account in cls.account:\n account.display_account_info()\n\n\nclass Jedi:\n\n def __init__(self, name):\n self.name = name\n self.account = {'Grey': User(5000, 0.3), 'light': User(300, 0.33)}\n\n\n<mask token>\nprint(prey.name)\nprey.we_call_cls()\n",
"step-4": "class User:\n account = []\n\n def __init__(self, balance, int_rate):\n self.balance = balance\n self.int_rate = int_rate\n User.account.append(self)\n\n def dep(self, amount):\n self.balance += amount\n return self\n\n def make_withdrawal(self, amount):\n if self.balance - amount >= 0:\n self.balance -= amount\n else:\n print('Insufficient funds:Charging a $5 fee')\n self.balance -= 5\n return self\n\n def display_account_info(self):\n print(self.balance)\n return self\n\n def yield_interest(self):\n self.balance = self.balance + self.balance * self.int_rate\n return self\n\n @classmethod\n def we_call_cls(cls):\n for account in cls.account:\n account.display_account_info()\n\n\nclass Jedi:\n\n def __init__(self, name):\n self.name = name\n self.account = {'Grey': User(5000, 0.3), 'light': User(300, 0.33)}\n\n\nprey = Jedi('prey')\nprint(prey.name)\nprey.we_call_cls()\n",
"step-5": "class User:\n account = []\n def __init__(self,balance,int_rate):\n self.balance = balance\n self.int_rate = int_rate\n User.account.append(self)\n def dep(self,amount):\n self.balance += amount\n return self\n\n def make_withdrawal(self,amount):\n if(self.balance-amount) >= 0:\n self.balance -= amount\n else:\n print(\"Insufficient funds:Charging a $5 fee\")\n self.balance -= 5\n return self\n \n def display_account_info(self):\n print(self.balance) #print(f\"Balance:{self.balance}\")\n return(self)\n \n def yield_interest(self):\n # self.balance+=(self.balance*self.int_rate)#times by a decimal gets you a smaller number\n self.balance=self.balance+self.balance*self.int_rate\n return(self)\n\n @classmethod\n def we_call_cls(cls):\n for account in cls.account:\n account.display_account_info()\n \n\nclass Jedi:\n def __init__(self,name):\n self.name = name #this means that its name is its name.\n self.account = {\n \"Grey\": User(5000,.3),\n \"light\": User(300,.33)\n }\n\nprey=Jedi('prey')\nprint(prey.name)\n\n\nprey.we_call_cls()\n\n\n\n",
"step-ids": [
8,
10,
11,
12,
13
]
}
|
[
8,
10,
11,
12,
13
] |
# State/Strategy-style example: Person delegates behavior to its current Mood.
# Converted from Python 2 print statements to Python 3, matching the rest of
# the document.
class Person:
    def __init__(self, mood):
        self.mood = mood

    def laugh(self):
        self.mood.laugh()

    def cry(self):
        self.mood.cry()

    def setMood(self, mood):
        self.mood = mood


class Mood:
    def laugh(self):
        pass

    def cry(self):
        pass


class HappyMood(Mood):
    def laugh(self):
        print('Ha ha ha!')


class SadMood(Mood):
    def cry(self):
        print('Sniff sniff')


p = Person(HappyMood())
p.laugh()
p.cry()

p.setMood(SadMood())
p.laugh()
p.cry()
|
normal
|
{
"blob_id": "4deb691545887104b3fb70dd2be52138088ba1e8",
"index": 1751,
"step-1": "class Person:\n\tdef __init__(self,mood):\n\t\tself.mood=mood;\n\n\tdef laugh(self):\n\t\tself.mood.laugh()\n\n\tdef cry(self):\n\t\tself.mood.cry()\n\t\n\tdef setMood(self, mood):\n\t\tself.mood=mood\n\nclass Mood:\n\tdef laugh(self):\n\t\tpass\n\tdef cry(self):\n\t\tpass\n\nclass HappyMood(Mood):\n\tdef laugh(self):\n\t\tprint 'Ha ha ha!'\n\nclass SadMood(Mood):\n\tdef cry(self):\n\t\tprint 'Sniff sniff'\n\np=Person(HappyMood())\np.laugh()\np.cry()\n\np.setMood(SadMood())\np.laugh()\np.cry()\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
"""Module to convert a lanelet UTM representation to OSM."""
__author__ = "Benjamin Orthen"
__copyright__ = "TUM Cyber-Physical Systems Group"
__credits__ = ["Priority Program SPP 1835 Cooperative Interacting Automobiles"]
__version__ = "1.1.2"
__maintainer__ = "Benjamin Orthen"
__email__ = "[email protected]"
__status__ = "Released"
from typing import List, Tuple
import numpy as np
from pyproj import Proj
from commonroad.scenario.lanelet import Lanelet
from opendrive2lanelet.osm.osm import OSM, Node, Way, WayRelation, DEFAULT_PROJ_STRING
ways_are_equal_tolerance = 0.001
class L2OSMConverter:
"""Class to convert CommonRoad lanelet to the OSM representation."""
def __init__(self, proj_string):
if proj_string:
self.proj = Proj(proj_string)
else:
self.proj = Proj(DEFAULT_PROJ_STRING)
self.osm = None
self._id_count = -1
self.first_nodes, self.last_nodes = None, None
self.left_ways, self.right_ways = None, None
self.lanelet_network = None
@property
def id_count(self) -> int:
"""Internal counter for giving IDs to the members of the OSM.
Each call returns the count and increases it by one.
Returns:
Current id count.
"""
tmp = self._id_count
self._id_count -= 1
return tmp
def __call__(self, scenario):
"""Convert a scenario to an OSM xml document.
Args:
scenario:
"""
self.osm = OSM()
self.lanelet_network = scenario.lanelet_network
self.first_nodes = dict() # saves first left and right node
self.last_nodes = dict() # saves last left and right node
self.left_ways = dict()
self.right_ways = dict()
for lanelet in scenario.lanelet_network.lanelets:
self._convert_lanelet(lanelet)
return self.osm.serialize_to_xml()
def _convert_lanelet(self, lanelet: Lanelet):
"""Convert a lanelet to a way relation.
Add the resulting relation and its ways and nodes to the OSM.
Args:
lanelet: Lanelet to be converted.
"""
# check if there are shared ways
right_way_id = self._get_potential_right_way(lanelet)
left_way_id = self._get_potential_left_way(lanelet)
left_nodes, right_nodes = self._create_nodes(lanelet, left_way_id, right_way_id)
self.first_nodes[lanelet.lanelet_id] = (left_nodes[0], right_nodes[0])
self.last_nodes[lanelet.lanelet_id] = (left_nodes[-1], right_nodes[-1])
if not left_way_id:
left_way = Way(self.id_count, *left_nodes)
self.osm.add_way(left_way)
left_way_id = left_way.id_
if not right_way_id:
right_way = Way(self.id_count, *right_nodes)
self.osm.add_way(right_way)
right_way_id = right_way.id_
self.left_ways[lanelet.lanelet_id] = left_way_id
self.right_ways[lanelet.lanelet_id] = right_way_id
self.osm.add_way_relation(WayRelation(self.id_count, left_way_id, right_way_id))
def _create_nodes(
self, lanelet: Lanelet, left_way_id: str, right_way_id: str
) -> Tuple[List[str], List[str]]:
"""Create new nodes for the ways of the lanelet.
Add them to OSM and return a list of the node ids.
In case a left or right way already exists, the returned list
only contains the first and last node of the way.
Args:
lanelet: Lanelet of which the right and left vertices should be converted to ways.
left_way_id: Id of a potential shared left way which was already converted.
If this is not None, the left vertices of the lanelet do not have to be converted again.
right_way_id: Id of a potential right way, similar to left_way_id.
Returns:
A tuple of lists of node ids for the left and the right way.
"""
left_nodes, right_nodes = [], []
start_index = 0
end_index = len(lanelet.left_vertices)
pot_first_left_node, pot_first_right_node = self._get_shared_first_nodes_from_other_lanelets(
lanelet
)
pot_last_left_node, pot_last_right_node = self._get_shared_last_nodes_from_other_lanelets(
lanelet
)
if pot_first_left_node:
start_index = 1
if pot_last_left_node:
end_index = -1
if left_way_id:
first_left_node, last_left_node = self._get_first_and_last_nodes_from_way(
left_way_id, lanelet.adj_left_same_direction
)
else:
first_left_node = pot_first_left_node
last_left_node = pot_last_left_node
left_nodes = self._create_nodes_from_vertices(
lanelet.left_vertices[start_index:end_index]
)
if right_way_id:
first_right_node, last_right_node = self._get_first_and_last_nodes_from_way(
right_way_id, lanelet.adj_right_same_direction
)
else:
first_right_node = pot_first_right_node
last_right_node = pot_last_right_node
right_nodes = self._create_nodes_from_vertices(
lanelet.right_vertices[start_index:end_index]
)
if first_left_node:
left_nodes.insert(0, first_left_node)
if first_right_node:
right_nodes.insert(0, first_right_node)
if last_left_node:
left_nodes.append(last_left_node)
if last_right_node:
right_nodes.append(last_right_node)
return left_nodes, right_nodes
def _get_first_and_last_nodes_from_way(
self, way_id: str, same_dir: bool
) -> Tuple[str, str]:
"""Get the first and the last node of a way.
Reverse order of nodes if way is reversed.
Args:
way_id: Id of way.
same_dir: True if way is in normal direction, False if it is reversed.
Returns:
Tuple with first and last node.
"""
way = self.osm.find_way_by_id(way_id)
first_idx, last_idx = (0, -1) if same_dir else (-1, 0)
return (way.nodes[first_idx], way.nodes[last_idx])
def _create_nodes_from_vertices(self, vertices: List[np.ndarray]) -> List[str]:
"""Create nodes and add them to the OSM.
Args:
vertices: List of vertices from a lanelet boundary.
Returns:
Ids of nodes which were created.
"""
nodes = []
for vertice in vertices:
lon, lat = self.proj(vertice[0], vertice[1], inverse=True)
node = Node(self.id_count, lat, lon)
nodes.append(node.id_)
self.osm.add_node(node)
return nodes
def _get_potential_right_way(self, lanelet):
"""Check if a shared right boundary with another lanelet can be transformed
to the same way.
Args:
lanelet: Lanelet of which right boundary should be converted to a way.
Returns:
Id of a way which can be shared, else None if it is not possible.
"""
if lanelet.adj_right:
if lanelet.adj_right_same_direction:
potential_right_way = self.left_ways.get(lanelet.adj_right)
else:
potential_right_way = self.right_ways.get(lanelet.adj_right)
if potential_right_way:
adj_right = self.lanelet_network.find_lanelet_by_id(lanelet.adj_right)
vertices = (
adj_right.left_vertices
if lanelet.adj_right_same_direction
else adj_right.right_vertices[::-1]
)
if _vertices_are_equal(lanelet.right_vertices, vertices):
return potential_right_way
return None
def _get_potential_left_way(self, lanelet):
"""Check if a shared left boundary with another lanelet can be transformed
to the same way.
Args:
lanelet: Lanelet of which left boundary should be converted to a way.
Returns:
Id of a way which can be shared, else None if it is not possible.
"""
if lanelet.adj_left:
if lanelet.adj_left_same_direction:
potential_left_way = self.right_ways.get(lanelet.adj_left)
else:
potential_left_way = self.left_ways.get(lanelet.adj_left)
if potential_left_way:
adj_left = self.lanelet_network.find_lanelet_by_id(lanelet.adj_left)
vertices = (
adj_left.right_vertices
if lanelet.adj_left_same_direction
else adj_left.left_vertices[::-1]
)
if _vertices_are_equal(lanelet.left_vertices, vertices):
return potential_left_way
return None
def _get_shared_first_nodes_from_other_lanelets(
self, lanelet: Lanelet
) -> Tuple[str, str]:
"""Get already created nodes from other lanelets which could also
be used by this lanelet as first nodes.
Args:
lanelet: Lanelet for which shared nodes should be found.
Returns:
Id of first left and first right node if they exist.
"""
if lanelet.predecessor:
for lanelet_id in lanelet.predecessor:
first_left_node, first_right_node = self.last_nodes.get(
lanelet_id, (None, None)
)
if first_left_node:
return first_left_node, first_right_node
for pred_id in lanelet.predecessor:
pred = self.lanelet_network.find_lanelet_by_id(pred_id)
for succ_id in pred.successor:
first_left_node, first_right_node = self.first_nodes.get(
succ_id, (None, None)
)
if first_left_node:
return first_left_node, first_right_node
return None, None
def _get_shared_last_nodes_from_other_lanelets(
self, lanelet: Lanelet
) -> Tuple[str, str]:
"""Get already created nodes from other lanelets which could also
be used by this lanelet as last nodes.
Args:
lanelet: Lanelet for which shared nodes should be found.
Returns:
Id of last left and last right node if they exist.
"""
if lanelet.successor:
for lanelet_id in lanelet.successor:
last_left_node, last_right_node = self.first_nodes.get(
lanelet_id, (None, None)
)
if last_left_node:
return last_left_node, last_right_node
for succ_id in lanelet.successor:
succ = self.lanelet_network.find_lanelet_by_id(succ_id)
for pred_id in succ.predecessor:
last_left_node, last_right_node = self.last_nodes.get(
pred_id, (None, None)
)
if last_left_node:
return last_left_node, last_right_node
return None, None
def _vertices_are_equal(
vertices1: List[np.ndarray], vertices2: List[np.ndarray]
) -> bool:
"""Checks if two list of vertices are equal up to a tolerance.
Args:
vertices1: First vertices to compare.
vertices2: Second vertices to compare.
Returns:
True if every vertice in one list is nearly equal to the
corresponding vertices at the same position in the other list.
"""
if len(vertices1) != len(vertices2):
return False
diff = vertices1 - vertices2
    # compare the maximum absolute difference; the original np.abs(np.max(diff))
    # could miss large negative deviations
    if np.max(np.abs(diff)) < ways_are_equal_tolerance:
return True
return False
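# A hypothetical end-to-end sketch, not part of the released module:
# CommonRoadFileReader is assumed from the commonroad-io package, the file
# paths and projection string are placeholders, and serialize_to_xml() is
# assumed to return a str here.
if __name__ == "__main__":
    from commonroad.common.file_reader import CommonRoadFileReader

    scenario, _ = CommonRoadFileReader("scenario.xml").open()
    converter = L2OSMConverter("+proj=utm +zone=32 +ellps=WGS84")
    with open("scenario.osm", "w") as fh:
        fh.write(converter(scenario))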
|
normal
|
{
"blob_id": "472c8b0649e29c31b144607080938793e5f1293e",
"index": 6834,
"step-1": "<mask token>\n\n\nclass L2OSMConverter:\n <mask token>\n\n def __init__(self, proj_string):\n if proj_string:\n self.proj = Proj(proj_string)\n else:\n self.proj = Proj(DEFAULT_PROJ_STRING)\n self.osm = None\n self._id_count = -1\n self.first_nodes, self.last_nodes = None, None\n self.left_ways, self.right_ways = None, None\n self.lanelet_network = None\n\n @property\n def id_count(self) ->int:\n \"\"\"Internal counter for giving IDs to the members of the OSM.\n\n Each call returns the count and increases it by one.\n Returns:\n Current id count.\n \"\"\"\n tmp = self._id_count\n self._id_count -= 1\n return tmp\n\n def __call__(self, scenario):\n \"\"\"Convert a scenario to an OSM xml document.\n\n Args:\n scenario:\n \"\"\"\n self.osm = OSM()\n self.lanelet_network = scenario.lanelet_network\n self.first_nodes = dict()\n self.last_nodes = dict()\n self.left_ways = dict()\n self.right_ways = dict()\n for lanelet in scenario.lanelet_network.lanelets:\n self._convert_lanelet(lanelet)\n return self.osm.serialize_to_xml()\n <mask token>\n <mask token>\n <mask token>\n\n def _create_nodes_from_vertices(self, vertices: List[np.ndarray]) ->List[\n str]:\n \"\"\"Create nodes and add them to the OSM.\n\n Args:\n vertices: List of vertices from a lanelet boundary.\n Returns:\n Ids of nodes which were created.\n \"\"\"\n nodes = []\n for vertice in vertices:\n lon, lat = self.proj(vertice[0], vertice[1], inverse=True)\n node = Node(self.id_count, lat, lon)\n nodes.append(node.id_)\n self.osm.add_node(node)\n return nodes\n <mask token>\n\n def _get_potential_left_way(self, lanelet):\n \"\"\"Check if a shared left boundary with another lanelet can be transformed\n to the same way.\n\n Args:\n lanelet: Lanelet of which left boundary should be converted to a way.\n Returns:\n Id of a way which can be shared, else None if it is not possible.\n \"\"\"\n if lanelet.adj_left:\n if lanelet.adj_left_same_direction:\n potential_left_way = self.right_ways.get(lanelet.adj_left)\n else:\n potential_left_way = self.left_ways.get(lanelet.adj_left)\n if potential_left_way:\n adj_left = self.lanelet_network.find_lanelet_by_id(lanelet.\n adj_left)\n vertices = (adj_left.right_vertices if lanelet.\n adj_left_same_direction else adj_left.left_vertices[::-1])\n if _vertices_are_equal(lanelet.left_vertices, vertices):\n return potential_left_way\n return None\n\n def _get_shared_first_nodes_from_other_lanelets(self, lanelet: Lanelet\n ) ->Tuple[str, str]:\n \"\"\"Get already created nodes from other lanelets which could also\n be used by this lanelet as first nodes.\n\n Args:\n lanelet: Lanelet for which shared nodes should be found.\n Returns:\n Id of first left and first right node if they exist.\n \"\"\"\n if lanelet.predecessor:\n for lanelet_id in lanelet.predecessor:\n first_left_node, first_right_node = self.last_nodes.get(\n lanelet_id, (None, None))\n if first_left_node:\n return first_left_node, first_right_node\n for pred_id in lanelet.predecessor:\n pred = self.lanelet_network.find_lanelet_by_id(pred_id)\n for succ_id in pred.successor:\n first_left_node, first_right_node = self.first_nodes.get(\n succ_id, (None, None))\n if first_left_node:\n return first_left_node, first_right_node\n return None, None\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass L2OSMConverter:\n <mask token>\n\n def __init__(self, proj_string):\n if proj_string:\n self.proj = Proj(proj_string)\n else:\n self.proj = Proj(DEFAULT_PROJ_STRING)\n self.osm = None\n self._id_count = -1\n self.first_nodes, self.last_nodes = None, None\n self.left_ways, self.right_ways = None, None\n self.lanelet_network = None\n\n @property\n def id_count(self) ->int:\n \"\"\"Internal counter for giving IDs to the members of the OSM.\n\n Each call returns the count and increases it by one.\n Returns:\n Current id count.\n \"\"\"\n tmp = self._id_count\n self._id_count -= 1\n return tmp\n\n def __call__(self, scenario):\n \"\"\"Convert a scenario to an OSM xml document.\n\n Args:\n scenario:\n \"\"\"\n self.osm = OSM()\n self.lanelet_network = scenario.lanelet_network\n self.first_nodes = dict()\n self.last_nodes = dict()\n self.left_ways = dict()\n self.right_ways = dict()\n for lanelet in scenario.lanelet_network.lanelets:\n self._convert_lanelet(lanelet)\n return self.osm.serialize_to_xml()\n <mask token>\n\n def _create_nodes(self, lanelet: Lanelet, left_way_id: str,\n right_way_id: str) ->Tuple[List[str], List[str]]:\n \"\"\"Create new nodes for the ways of the lanelet.\n Add them to OSM and return a list of the node ids.\n\n In case a left or right way already exists, the returned list\n only contains the first and last node of the way.\n Args:\n lanelet: Lanelet of which the right and left vertices should be converted to ways.\n left_way_id: Id of a potential shared left way which was already converted.\n If this is not None, the left vertices of the lanelet do not have to be converted again.\n right_way_id: Id of a potential right way, similar to left_way_id.\n Returns:\n A tuple of lists of node ids for the left and the right way.\n \"\"\"\n left_nodes, right_nodes = [], []\n start_index = 0\n end_index = len(lanelet.left_vertices)\n pot_first_left_node, pot_first_right_node = (self.\n _get_shared_first_nodes_from_other_lanelets(lanelet))\n pot_last_left_node, pot_last_right_node = (self.\n _get_shared_last_nodes_from_other_lanelets(lanelet))\n if pot_first_left_node:\n start_index = 1\n if pot_last_left_node:\n end_index = -1\n if left_way_id:\n first_left_node, last_left_node = (self.\n _get_first_and_last_nodes_from_way(left_way_id, lanelet.\n adj_left_same_direction))\n else:\n first_left_node = pot_first_left_node\n last_left_node = pot_last_left_node\n left_nodes = self._create_nodes_from_vertices(lanelet.\n left_vertices[start_index:end_index])\n if right_way_id:\n first_right_node, last_right_node = (self.\n _get_first_and_last_nodes_from_way(right_way_id, lanelet.\n adj_right_same_direction))\n else:\n first_right_node = pot_first_right_node\n last_right_node = pot_last_right_node\n right_nodes = self._create_nodes_from_vertices(lanelet.\n right_vertices[start_index:end_index])\n if first_left_node:\n left_nodes.insert(0, first_left_node)\n if first_right_node:\n right_nodes.insert(0, first_right_node)\n if last_left_node:\n left_nodes.append(last_left_node)\n if last_right_node:\n right_nodes.append(last_right_node)\n return left_nodes, right_nodes\n <mask token>\n\n def _create_nodes_from_vertices(self, vertices: List[np.ndarray]) ->List[\n str]:\n \"\"\"Create nodes and add them to the OSM.\n\n Args:\n vertices: List of vertices from a lanelet boundary.\n Returns:\n Ids of nodes which were created.\n \"\"\"\n nodes = []\n for vertice in vertices:\n lon, lat = self.proj(vertice[0], vertice[1], inverse=True)\n node = 
Node(self.id_count, lat, lon)\n nodes.append(node.id_)\n self.osm.add_node(node)\n return nodes\n <mask token>\n\n def _get_potential_left_way(self, lanelet):\n \"\"\"Check if a shared left boundary with another lanelet can be transformed\n to the same way.\n\n Args:\n lanelet: Lanelet of which left boundary should be converted to a way.\n Returns:\n Id of a way which can be shared, else None if it is not possible.\n \"\"\"\n if lanelet.adj_left:\n if lanelet.adj_left_same_direction:\n potential_left_way = self.right_ways.get(lanelet.adj_left)\n else:\n potential_left_way = self.left_ways.get(lanelet.adj_left)\n if potential_left_way:\n adj_left = self.lanelet_network.find_lanelet_by_id(lanelet.\n adj_left)\n vertices = (adj_left.right_vertices if lanelet.\n adj_left_same_direction else adj_left.left_vertices[::-1])\n if _vertices_are_equal(lanelet.left_vertices, vertices):\n return potential_left_way\n return None\n\n def _get_shared_first_nodes_from_other_lanelets(self, lanelet: Lanelet\n ) ->Tuple[str, str]:\n \"\"\"Get already created nodes from other lanelets which could also\n be used by this lanelet as first nodes.\n\n Args:\n lanelet: Lanelet for which shared nodes should be found.\n Returns:\n Id of first left and first right node if they exist.\n \"\"\"\n if lanelet.predecessor:\n for lanelet_id in lanelet.predecessor:\n first_left_node, first_right_node = self.last_nodes.get(\n lanelet_id, (None, None))\n if first_left_node:\n return first_left_node, first_right_node\n for pred_id in lanelet.predecessor:\n pred = self.lanelet_network.find_lanelet_by_id(pred_id)\n for succ_id in pred.successor:\n first_left_node, first_right_node = self.first_nodes.get(\n succ_id, (None, None))\n if first_left_node:\n return first_left_node, first_right_node\n return None, None\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass L2OSMConverter:\n <mask token>\n\n def __init__(self, proj_string):\n if proj_string:\n self.proj = Proj(proj_string)\n else:\n self.proj = Proj(DEFAULT_PROJ_STRING)\n self.osm = None\n self._id_count = -1\n self.first_nodes, self.last_nodes = None, None\n self.left_ways, self.right_ways = None, None\n self.lanelet_network = None\n\n @property\n def id_count(self) ->int:\n \"\"\"Internal counter for giving IDs to the members of the OSM.\n\n Each call returns the count and increases it by one.\n Returns:\n Current id count.\n \"\"\"\n tmp = self._id_count\n self._id_count -= 1\n return tmp\n\n def __call__(self, scenario):\n \"\"\"Convert a scenario to an OSM xml document.\n\n Args:\n scenario:\n \"\"\"\n self.osm = OSM()\n self.lanelet_network = scenario.lanelet_network\n self.first_nodes = dict()\n self.last_nodes = dict()\n self.left_ways = dict()\n self.right_ways = dict()\n for lanelet in scenario.lanelet_network.lanelets:\n self._convert_lanelet(lanelet)\n return self.osm.serialize_to_xml()\n\n def _convert_lanelet(self, lanelet: Lanelet):\n \"\"\"Convert a lanelet to a way relation.\n\n Add the resulting relation and its ways and nodes to the OSM.\n\n Args:\n lanelet: Lanelet to be converted.\n \"\"\"\n right_way_id = self._get_potential_right_way(lanelet)\n left_way_id = self._get_potential_left_way(lanelet)\n left_nodes, right_nodes = self._create_nodes(lanelet, left_way_id,\n right_way_id)\n self.first_nodes[lanelet.lanelet_id] = left_nodes[0], right_nodes[0]\n self.last_nodes[lanelet.lanelet_id] = left_nodes[-1], right_nodes[-1]\n if not left_way_id:\n left_way = Way(self.id_count, *left_nodes)\n self.osm.add_way(left_way)\n left_way_id = left_way.id_\n if not right_way_id:\n right_way = Way(self.id_count, *right_nodes)\n self.osm.add_way(right_way)\n right_way_id = right_way.id_\n self.left_ways[lanelet.lanelet_id] = left_way_id\n self.right_ways[lanelet.lanelet_id] = right_way_id\n self.osm.add_way_relation(WayRelation(self.id_count, left_way_id,\n right_way_id))\n\n def _create_nodes(self, lanelet: Lanelet, left_way_id: str,\n right_way_id: str) ->Tuple[List[str], List[str]]:\n \"\"\"Create new nodes for the ways of the lanelet.\n Add them to OSM and return a list of the node ids.\n\n In case a left or right way already exists, the returned list\n only contains the first and last node of the way.\n Args:\n lanelet: Lanelet of which the right and left vertices should be converted to ways.\n left_way_id: Id of a potential shared left way which was already converted.\n If this is not None, the left vertices of the lanelet do not have to be converted again.\n right_way_id: Id of a potential right way, similar to left_way_id.\n Returns:\n A tuple of lists of node ids for the left and the right way.\n \"\"\"\n left_nodes, right_nodes = [], []\n start_index = 0\n end_index = len(lanelet.left_vertices)\n pot_first_left_node, pot_first_right_node = (self.\n _get_shared_first_nodes_from_other_lanelets(lanelet))\n pot_last_left_node, pot_last_right_node = (self.\n _get_shared_last_nodes_from_other_lanelets(lanelet))\n if pot_first_left_node:\n start_index = 1\n if pot_last_left_node:\n end_index = -1\n if left_way_id:\n first_left_node, last_left_node = (self.\n _get_first_and_last_nodes_from_way(left_way_id, lanelet.\n adj_left_same_direction))\n else:\n first_left_node = pot_first_left_node\n last_left_node = pot_last_left_node\n left_nodes = self._create_nodes_from_vertices(lanelet.\n left_vertices[start_index:end_index])\n if right_way_id:\n 
first_right_node, last_right_node = (self.\n _get_first_and_last_nodes_from_way(right_way_id, lanelet.\n adj_right_same_direction))\n else:\n first_right_node = pot_first_right_node\n last_right_node = pot_last_right_node\n right_nodes = self._create_nodes_from_vertices(lanelet.\n right_vertices[start_index:end_index])\n if first_left_node:\n left_nodes.insert(0, first_left_node)\n if first_right_node:\n right_nodes.insert(0, first_right_node)\n if last_left_node:\n left_nodes.append(last_left_node)\n if last_right_node:\n right_nodes.append(last_right_node)\n return left_nodes, right_nodes\n <mask token>\n\n def _create_nodes_from_vertices(self, vertices: List[np.ndarray]) ->List[\n str]:\n \"\"\"Create nodes and add them to the OSM.\n\n Args:\n vertices: List of vertices from a lanelet boundary.\n Returns:\n Ids of nodes which were created.\n \"\"\"\n nodes = []\n for vertice in vertices:\n lon, lat = self.proj(vertice[0], vertice[1], inverse=True)\n node = Node(self.id_count, lat, lon)\n nodes.append(node.id_)\n self.osm.add_node(node)\n return nodes\n\n def _get_potential_right_way(self, lanelet):\n \"\"\"Check if a shared right boundary with another lanelet can be transformed\n to the same way.\n\n Args:\n lanelet: Lanelet of which right boundary should be converted to a way.\n Returns:\n Id of a way which can be shared, else None if it is not possible.\n \"\"\"\n if lanelet.adj_right:\n if lanelet.adj_right_same_direction:\n potential_right_way = self.left_ways.get(lanelet.adj_right)\n else:\n potential_right_way = self.right_ways.get(lanelet.adj_right)\n if potential_right_way:\n adj_right = self.lanelet_network.find_lanelet_by_id(lanelet\n .adj_right)\n vertices = (adj_right.left_vertices if lanelet.\n adj_right_same_direction else adj_right.right_vertices[\n ::-1])\n if _vertices_are_equal(lanelet.right_vertices, vertices):\n return potential_right_way\n return None\n\n def _get_potential_left_way(self, lanelet):\n \"\"\"Check if a shared left boundary with another lanelet can be transformed\n to the same way.\n\n Args:\n lanelet: Lanelet of which left boundary should be converted to a way.\n Returns:\n Id of a way which can be shared, else None if it is not possible.\n \"\"\"\n if lanelet.adj_left:\n if lanelet.adj_left_same_direction:\n potential_left_way = self.right_ways.get(lanelet.adj_left)\n else:\n potential_left_way = self.left_ways.get(lanelet.adj_left)\n if potential_left_way:\n adj_left = self.lanelet_network.find_lanelet_by_id(lanelet.\n adj_left)\n vertices = (adj_left.right_vertices if lanelet.\n adj_left_same_direction else adj_left.left_vertices[::-1])\n if _vertices_are_equal(lanelet.left_vertices, vertices):\n return potential_left_way\n return None\n\n def _get_shared_first_nodes_from_other_lanelets(self, lanelet: Lanelet\n ) ->Tuple[str, str]:\n \"\"\"Get already created nodes from other lanelets which could also\n be used by this lanelet as first nodes.\n\n Args:\n lanelet: Lanelet for which shared nodes should be found.\n Returns:\n Id of first left and first right node if they exist.\n \"\"\"\n if lanelet.predecessor:\n for lanelet_id in lanelet.predecessor:\n first_left_node, first_right_node = self.last_nodes.get(\n lanelet_id, (None, None))\n if first_left_node:\n return first_left_node, first_right_node\n for pred_id in lanelet.predecessor:\n pred = self.lanelet_network.find_lanelet_by_id(pred_id)\n for succ_id in pred.successor:\n first_left_node, first_right_node = self.first_nodes.get(\n succ_id, (None, None))\n if first_left_node:\n return 
first_left_node, first_right_node\n return None, None\n <mask token>\n\n\n<mask token>\n",
"step-4": "<mask token>\n__author__ = 'Benjamin Orthen'\n__copyright__ = 'TUM Cyber-Physical Systems Group'\n__credits__ = ['Priority Program SPP 1835 Cooperative Interacting Automobiles']\n__version__ = '1.1.2'\n__maintainer__ = 'Benjamin Orthen'\n__email__ = '[email protected]'\n__status__ = 'Released'\nfrom typing import List, Tuple\nimport numpy as np\nfrom pyproj import Proj\nfrom commonroad.scenario.lanelet import Lanelet\nfrom opendrive2lanelet.osm.osm import OSM, Node, Way, WayRelation, DEFAULT_PROJ_STRING\nways_are_equal_tolerance = 0.001\n\n\nclass L2OSMConverter:\n \"\"\"Class to convert CommonRoad lanelet to the OSM representation.\"\"\"\n\n def __init__(self, proj_string):\n if proj_string:\n self.proj = Proj(proj_string)\n else:\n self.proj = Proj(DEFAULT_PROJ_STRING)\n self.osm = None\n self._id_count = -1\n self.first_nodes, self.last_nodes = None, None\n self.left_ways, self.right_ways = None, None\n self.lanelet_network = None\n\n @property\n def id_count(self) ->int:\n \"\"\"Internal counter for giving IDs to the members of the OSM.\n\n Each call returns the count and increases it by one.\n Returns:\n Current id count.\n \"\"\"\n tmp = self._id_count\n self._id_count -= 1\n return tmp\n\n def __call__(self, scenario):\n \"\"\"Convert a scenario to an OSM xml document.\n\n Args:\n scenario:\n \"\"\"\n self.osm = OSM()\n self.lanelet_network = scenario.lanelet_network\n self.first_nodes = dict()\n self.last_nodes = dict()\n self.left_ways = dict()\n self.right_ways = dict()\n for lanelet in scenario.lanelet_network.lanelets:\n self._convert_lanelet(lanelet)\n return self.osm.serialize_to_xml()\n\n def _convert_lanelet(self, lanelet: Lanelet):\n \"\"\"Convert a lanelet to a way relation.\n\n Add the resulting relation and its ways and nodes to the OSM.\n\n Args:\n lanelet: Lanelet to be converted.\n \"\"\"\n right_way_id = self._get_potential_right_way(lanelet)\n left_way_id = self._get_potential_left_way(lanelet)\n left_nodes, right_nodes = self._create_nodes(lanelet, left_way_id,\n right_way_id)\n self.first_nodes[lanelet.lanelet_id] = left_nodes[0], right_nodes[0]\n self.last_nodes[lanelet.lanelet_id] = left_nodes[-1], right_nodes[-1]\n if not left_way_id:\n left_way = Way(self.id_count, *left_nodes)\n self.osm.add_way(left_way)\n left_way_id = left_way.id_\n if not right_way_id:\n right_way = Way(self.id_count, *right_nodes)\n self.osm.add_way(right_way)\n right_way_id = right_way.id_\n self.left_ways[lanelet.lanelet_id] = left_way_id\n self.right_ways[lanelet.lanelet_id] = right_way_id\n self.osm.add_way_relation(WayRelation(self.id_count, left_way_id,\n right_way_id))\n\n def _create_nodes(self, lanelet: Lanelet, left_way_id: str,\n right_way_id: str) ->Tuple[List[str], List[str]]:\n \"\"\"Create new nodes for the ways of the lanelet.\n Add them to OSM and return a list of the node ids.\n\n In case a left or right way already exists, the returned list\n only contains the first and last node of the way.\n Args:\n lanelet: Lanelet of which the right and left vertices should be converted to ways.\n left_way_id: Id of a potential shared left way which was already converted.\n If this is not None, the left vertices of the lanelet do not have to be converted again.\n right_way_id: Id of a potential right way, similar to left_way_id.\n Returns:\n A tuple of lists of node ids for the left and the right way.\n \"\"\"\n left_nodes, right_nodes = [], []\n start_index = 0\n end_index = len(lanelet.left_vertices)\n pot_first_left_node, pot_first_right_node = (self.\n 
_get_shared_first_nodes_from_other_lanelets(lanelet))\n pot_last_left_node, pot_last_right_node = (self.\n _get_shared_last_nodes_from_other_lanelets(lanelet))\n if pot_first_left_node:\n start_index = 1\n if pot_last_left_node:\n end_index = -1\n if left_way_id:\n first_left_node, last_left_node = (self.\n _get_first_and_last_nodes_from_way(left_way_id, lanelet.\n adj_left_same_direction))\n else:\n first_left_node = pot_first_left_node\n last_left_node = pot_last_left_node\n left_nodes = self._create_nodes_from_vertices(lanelet.\n left_vertices[start_index:end_index])\n if right_way_id:\n first_right_node, last_right_node = (self.\n _get_first_and_last_nodes_from_way(right_way_id, lanelet.\n adj_right_same_direction))\n else:\n first_right_node = pot_first_right_node\n last_right_node = pot_last_right_node\n right_nodes = self._create_nodes_from_vertices(lanelet.\n right_vertices[start_index:end_index])\n if first_left_node:\n left_nodes.insert(0, first_left_node)\n if first_right_node:\n right_nodes.insert(0, first_right_node)\n if last_left_node:\n left_nodes.append(last_left_node)\n if last_right_node:\n right_nodes.append(last_right_node)\n return left_nodes, right_nodes\n\n def _get_first_and_last_nodes_from_way(self, way_id: str, same_dir: bool\n ) ->Tuple[str, str]:\n \"\"\"Get the first and the last node of a way.\n\n Reverse order of nodes if way is reversed.\n Args:\n way_id: Id of way.\n same_dir: True if way is in normal direction, False if it is reversed.\n Returns:\n Tuple with first and last node.\n \"\"\"\n way = self.osm.find_way_by_id(way_id)\n first_idx, last_idx = (0, -1) if same_dir else (-1, 0)\n return way.nodes[first_idx], way.nodes[last_idx]\n\n def _create_nodes_from_vertices(self, vertices: List[np.ndarray]) ->List[\n str]:\n \"\"\"Create nodes and add them to the OSM.\n\n Args:\n vertices: List of vertices from a lanelet boundary.\n Returns:\n Ids of nodes which were created.\n \"\"\"\n nodes = []\n for vertice in vertices:\n lon, lat = self.proj(vertice[0], vertice[1], inverse=True)\n node = Node(self.id_count, lat, lon)\n nodes.append(node.id_)\n self.osm.add_node(node)\n return nodes\n\n def _get_potential_right_way(self, lanelet):\n \"\"\"Check if a shared right boundary with another lanelet can be transformed\n to the same way.\n\n Args:\n lanelet: Lanelet of which right boundary should be converted to a way.\n Returns:\n Id of a way which can be shared, else None if it is not possible.\n \"\"\"\n if lanelet.adj_right:\n if lanelet.adj_right_same_direction:\n potential_right_way = self.left_ways.get(lanelet.adj_right)\n else:\n potential_right_way = self.right_ways.get(lanelet.adj_right)\n if potential_right_way:\n adj_right = self.lanelet_network.find_lanelet_by_id(lanelet\n .adj_right)\n vertices = (adj_right.left_vertices if lanelet.\n adj_right_same_direction else adj_right.right_vertices[\n ::-1])\n if _vertices_are_equal(lanelet.right_vertices, vertices):\n return potential_right_way\n return None\n\n def _get_potential_left_way(self, lanelet):\n \"\"\"Check if a shared left boundary with another lanelet can be transformed\n to the same way.\n\n Args:\n lanelet: Lanelet of which left boundary should be converted to a way.\n Returns:\n Id of a way which can be shared, else None if it is not possible.\n \"\"\"\n if lanelet.adj_left:\n if lanelet.adj_left_same_direction:\n potential_left_way = self.right_ways.get(lanelet.adj_left)\n else:\n potential_left_way = self.left_ways.get(lanelet.adj_left)\n if potential_left_way:\n adj_left = 
self.lanelet_network.find_lanelet_by_id(lanelet.\n adj_left)\n vertices = (adj_left.right_vertices if lanelet.\n adj_left_same_direction else adj_left.left_vertices[::-1])\n if _vertices_are_equal(lanelet.left_vertices, vertices):\n return potential_left_way\n return None\n\n def _get_shared_first_nodes_from_other_lanelets(self, lanelet: Lanelet\n ) ->Tuple[str, str]:\n \"\"\"Get already created nodes from other lanelets which could also\n be used by this lanelet as first nodes.\n\n Args:\n lanelet: Lanelet for which shared nodes should be found.\n Returns:\n Id of first left and first right node if they exist.\n \"\"\"\n if lanelet.predecessor:\n for lanelet_id in lanelet.predecessor:\n first_left_node, first_right_node = self.last_nodes.get(\n lanelet_id, (None, None))\n if first_left_node:\n return first_left_node, first_right_node\n for pred_id in lanelet.predecessor:\n pred = self.lanelet_network.find_lanelet_by_id(pred_id)\n for succ_id in pred.successor:\n first_left_node, first_right_node = self.first_nodes.get(\n succ_id, (None, None))\n if first_left_node:\n return first_left_node, first_right_node\n return None, None\n\n def _get_shared_last_nodes_from_other_lanelets(self, lanelet: Lanelet\n ) ->Tuple[str, str]:\n \"\"\"Get already created nodes from other lanelets which could also\n be used by this lanelet as last nodes.\n\n Args:\n lanelet: Lanelet for which shared nodes should be found.\n Returns:\n Id of last left and last right node if they exist.\n \"\"\"\n if lanelet.successor:\n for lanelet_id in lanelet.successor:\n last_left_node, last_right_node = self.first_nodes.get(\n lanelet_id, (None, None))\n if last_left_node:\n return last_left_node, last_right_node\n for succ_id in lanelet.successor:\n succ = self.lanelet_network.find_lanelet_by_id(succ_id)\n for pred_id in succ.predecessor:\n last_left_node, last_right_node = self.last_nodes.get(\n pred_id, (None, None))\n if last_left_node:\n return last_left_node, last_right_node\n return None, None\n\n\ndef _vertices_are_equal(vertices1: List[np.ndarray], vertices2: List[np.\n ndarray]) ->bool:\n \"\"\"Checks if two list of vertices are equal up to a tolerance.\n\n Args:\n vertices1: First vertices to compare.\n vertices2: Second vertices to compare.\n\n Returns:\n True if every vertice in one list is nearly equal to the\n corresponding vertices at the same position in the other list.\n \"\"\"\n if len(vertices1) != len(vertices2):\n return False\n diff = vertices1 - vertices2\n if np.abs(np.max(diff)) < ways_are_equal_tolerance:\n return True\n return False\n",
"step-5": "\"\"\"Module to convert a lanelet UTM representation to OSM.\"\"\"\n\n__author__ = \"Benjamin Orthen\"\n__copyright__ = \"TUM Cyber-Physical Systems Group\"\n__credits__ = [\"Priority Program SPP 1835 Cooperative Interacting Automobiles\"]\n__version__ = \"1.1.2\"\n__maintainer__ = \"Benjamin Orthen\"\n__email__ = \"[email protected]\"\n__status__ = \"Released\"\n\nfrom typing import List, Tuple\n\nimport numpy as np\nfrom pyproj import Proj\nfrom commonroad.scenario.lanelet import Lanelet\n\nfrom opendrive2lanelet.osm.osm import OSM, Node, Way, WayRelation, DEFAULT_PROJ_STRING\n\nways_are_equal_tolerance = 0.001\n\n\nclass L2OSMConverter:\n \"\"\"Class to convert CommonRoad lanelet to the OSM representation.\"\"\"\n\n def __init__(self, proj_string):\n if proj_string:\n self.proj = Proj(proj_string)\n else:\n self.proj = Proj(DEFAULT_PROJ_STRING)\n self.osm = None\n self._id_count = -1\n self.first_nodes, self.last_nodes = None, None\n self.left_ways, self.right_ways = None, None\n self.lanelet_network = None\n\n @property\n def id_count(self) -> int:\n \"\"\"Internal counter for giving IDs to the members of the OSM.\n\n Each call returns the count and increases it by one.\n Returns:\n Current id count.\n \"\"\"\n tmp = self._id_count\n self._id_count -= 1\n return tmp\n\n def __call__(self, scenario):\n \"\"\"Convert a scenario to an OSM xml document.\n\n Args:\n scenario:\n \"\"\"\n self.osm = OSM()\n self.lanelet_network = scenario.lanelet_network\n self.first_nodes = dict() # saves first left and right node\n self.last_nodes = dict() # saves last left and right node\n self.left_ways = dict()\n self.right_ways = dict()\n for lanelet in scenario.lanelet_network.lanelets:\n self._convert_lanelet(lanelet)\n\n return self.osm.serialize_to_xml()\n\n def _convert_lanelet(self, lanelet: Lanelet):\n \"\"\"Convert a lanelet to a way relation.\n\n Add the resulting relation and its ways and nodes to the OSM.\n\n Args:\n lanelet: Lanelet to be converted.\n \"\"\"\n\n # check if there are shared ways\n right_way_id = self._get_potential_right_way(lanelet)\n left_way_id = self._get_potential_left_way(lanelet)\n\n left_nodes, right_nodes = self._create_nodes(lanelet, left_way_id, right_way_id)\n\n self.first_nodes[lanelet.lanelet_id] = (left_nodes[0], right_nodes[0])\n self.last_nodes[lanelet.lanelet_id] = (left_nodes[-1], right_nodes[-1])\n\n if not left_way_id:\n left_way = Way(self.id_count, *left_nodes)\n self.osm.add_way(left_way)\n left_way_id = left_way.id_\n if not right_way_id:\n right_way = Way(self.id_count, *right_nodes)\n self.osm.add_way(right_way)\n right_way_id = right_way.id_\n\n self.left_ways[lanelet.lanelet_id] = left_way_id\n self.right_ways[lanelet.lanelet_id] = right_way_id\n self.osm.add_way_relation(WayRelation(self.id_count, left_way_id, right_way_id))\n\n def _create_nodes(\n self, lanelet: Lanelet, left_way_id: str, right_way_id: str\n ) -> Tuple[List[str], List[str]]:\n \"\"\"Create new nodes for the ways of the lanelet.\n Add them to OSM and return a list of the node ids.\n\n In case a left or right way already exists, the returned list\n only contains the first and last node of the way.\n Args:\n lanelet: Lanelet of which the right and left vertices should be converted to ways.\n left_way_id: Id of a potential shared left way which was already converted.\n If this is not None, the left vertices of the lanelet do not have to be converted again.\n right_way_id: Id of a potential right way, similar to left_way_id.\n Returns:\n A tuple of lists of node ids for 
the left and the right way.\n \"\"\"\n left_nodes, right_nodes = [], []\n start_index = 0\n end_index = len(lanelet.left_vertices)\n pot_first_left_node, pot_first_right_node = self._get_shared_first_nodes_from_other_lanelets(\n lanelet\n )\n pot_last_left_node, pot_last_right_node = self._get_shared_last_nodes_from_other_lanelets(\n lanelet\n )\n if pot_first_left_node:\n start_index = 1\n if pot_last_left_node:\n end_index = -1\n\n if left_way_id:\n first_left_node, last_left_node = self._get_first_and_last_nodes_from_way(\n left_way_id, lanelet.adj_left_same_direction\n )\n else:\n first_left_node = pot_first_left_node\n last_left_node = pot_last_left_node\n left_nodes = self._create_nodes_from_vertices(\n lanelet.left_vertices[start_index:end_index]\n )\n if right_way_id:\n first_right_node, last_right_node = self._get_first_and_last_nodes_from_way(\n right_way_id, lanelet.adj_right_same_direction\n )\n else:\n first_right_node = pot_first_right_node\n last_right_node = pot_last_right_node\n right_nodes = self._create_nodes_from_vertices(\n lanelet.right_vertices[start_index:end_index]\n )\n\n if first_left_node:\n left_nodes.insert(0, first_left_node)\n if first_right_node:\n right_nodes.insert(0, first_right_node)\n\n if last_left_node:\n left_nodes.append(last_left_node)\n if last_right_node:\n right_nodes.append(last_right_node)\n\n return left_nodes, right_nodes\n\n def _get_first_and_last_nodes_from_way(\n self, way_id: str, same_dir: bool\n ) -> Tuple[str, str]:\n \"\"\"Get the first and the last node of a way.\n\n Reverse order of nodes if way is reversed.\n Args:\n way_id: Id of way.\n same_dir: True if way is in normal direction, False if it is reversed.\n Returns:\n Tuple with first and last node.\n \"\"\"\n way = self.osm.find_way_by_id(way_id)\n first_idx, last_idx = (0, -1) if same_dir else (-1, 0)\n return (way.nodes[first_idx], way.nodes[last_idx])\n\n def _create_nodes_from_vertices(self, vertices: List[np.ndarray]) -> List[str]:\n \"\"\"Create nodes and add them to the OSM.\n\n Args:\n vertices: List of vertices from a lanelet boundary.\n Returns:\n Ids of nodes which were created.\n \"\"\"\n nodes = []\n for vertice in vertices:\n lon, lat = self.proj(vertice[0], vertice[1], inverse=True)\n node = Node(self.id_count, lat, lon)\n nodes.append(node.id_)\n self.osm.add_node(node)\n return nodes\n\n def _get_potential_right_way(self, lanelet):\n \"\"\"Check if a shared right boundary with another lanelet can be transformed\n to the same way.\n\n Args:\n lanelet: Lanelet of which right boundary should be converted to a way.\n Returns:\n Id of a way which can be shared, else None if it is not possible.\n \"\"\"\n if lanelet.adj_right:\n if lanelet.adj_right_same_direction:\n potential_right_way = self.left_ways.get(lanelet.adj_right)\n else:\n potential_right_way = self.right_ways.get(lanelet.adj_right)\n if potential_right_way:\n adj_right = self.lanelet_network.find_lanelet_by_id(lanelet.adj_right)\n vertices = (\n adj_right.left_vertices\n if lanelet.adj_right_same_direction\n else adj_right.right_vertices[::-1]\n )\n if _vertices_are_equal(lanelet.right_vertices, vertices):\n return potential_right_way\n\n return None\n\n def _get_potential_left_way(self, lanelet):\n \"\"\"Check if a shared left boundary with another lanelet can be transformed\n to the same way.\n\n Args:\n lanelet: Lanelet of which left boundary should be converted to a way.\n Returns:\n Id of a way which can be shared, else None if it is not possible.\n \"\"\"\n if lanelet.adj_left:\n if 
lanelet.adj_left_same_direction:\n potential_left_way = self.right_ways.get(lanelet.adj_left)\n else:\n potential_left_way = self.left_ways.get(lanelet.adj_left)\n if potential_left_way:\n adj_left = self.lanelet_network.find_lanelet_by_id(lanelet.adj_left)\n vertices = (\n adj_left.right_vertices\n if lanelet.adj_left_same_direction\n else adj_left.left_vertices[::-1]\n )\n if _vertices_are_equal(lanelet.left_vertices, vertices):\n return potential_left_way\n\n return None\n\n def _get_shared_first_nodes_from_other_lanelets(\n self, lanelet: Lanelet\n ) -> Tuple[str, str]:\n \"\"\"Get already created nodes from other lanelets which could also\n be used by this lanelet as first nodes.\n\n Args:\n lanelet: Lanelet for which shared nodes should be found.\n Returns:\n Id of first left and first right node if they exist.\n \"\"\"\n if lanelet.predecessor:\n for lanelet_id in lanelet.predecessor:\n first_left_node, first_right_node = self.last_nodes.get(\n lanelet_id, (None, None)\n )\n if first_left_node:\n return first_left_node, first_right_node\n for pred_id in lanelet.predecessor:\n pred = self.lanelet_network.find_lanelet_by_id(pred_id)\n for succ_id in pred.successor:\n first_left_node, first_right_node = self.first_nodes.get(\n succ_id, (None, None)\n )\n if first_left_node:\n return first_left_node, first_right_node\n return None, None\n\n def _get_shared_last_nodes_from_other_lanelets(\n self, lanelet: Lanelet\n ) -> Tuple[str, str]:\n \"\"\"Get already created nodes from other lanelets which could also\n be used by this lanelet as last nodes.\n\n Args:\n lanelet: Lanelet for which shared nodes should be found.\n Returns:\n Id of last left and last right node if they exist.\n \"\"\"\n if lanelet.successor:\n for lanelet_id in lanelet.successor:\n last_left_node, last_right_node = self.first_nodes.get(\n lanelet_id, (None, None)\n )\n if last_left_node:\n return last_left_node, last_right_node\n for succ_id in lanelet.successor:\n succ = self.lanelet_network.find_lanelet_by_id(succ_id)\n for pred_id in succ.predecessor:\n last_left_node, last_right_node = self.last_nodes.get(\n pred_id, (None, None)\n )\n if last_left_node:\n return last_left_node, last_right_node\n\n return None, None\n\n\ndef _vertices_are_equal(\n vertices1: List[np.ndarray], vertices2: List[np.ndarray]\n) -> bool:\n \"\"\"Checks if two list of vertices are equal up to a tolerance.\n\n Args:\n vertices1: First vertices to compare.\n vertices2: Second vertices to compare.\n\n Returns:\n True if every vertice in one list is nearly equal to the\n corresponding vertices at the same position in the other list.\n \"\"\"\n if len(vertices1) != len(vertices2):\n return False\n diff = vertices1 - vertices2\n if np.abs(np.max(diff)) < ways_are_equal_tolerance:\n return True\n return False\n",
"step-ids": [
7,
8,
10,
16,
17
]
}
|
[
7,
8,
10,
16,
17
] |
# Number Guessing Game

from random import randint

nyawa = 3                       # number of attempts given
angka_rahasia = randint(0, 10)  # the secret answer
limit = 0                       # when nyawa reaches this limit, the player loses

print("Welcome to the Number Guessing Game")
while nyawa > limit:
    print(f"You have {nyawa} attempts left")
    jawaban = int(input("Enter a number 0-10 = "))
    if jawaban == angka_rahasia:
        print("Correct!")
        break
    elif nyawa - 1 == limit:
        print("You lose")
        break
    elif jawaban > angka_rahasia:
        print("Too high")
        nyawa -= 1
    elif jawaban < angka_rahasia:
        print("Too low")
        nyawa -= 1
"""# Game Tebak Angka
from random import randint
# Mengimpor Library Random untuk membuat angka rahasia secara acak
nyawa = 3 # Jumlah percobaan yang di berikan
angka_rahasia = randint(0,10) # Angka rahasia sebagai jawaban game
limit = 0 # Batas nyawa jika nyawa jadi 0 maka pemain akan gagal
print(f"Selamat datang di Game Tebak angka")
while nyawa > limit:
# ini menandakan bahwa game akan berjalan
# jika nyawa lebih besar dari limit
print(f"Percobaan anda tersisa {nyawa}")
# ini untuk memberitahukan pemain jumlah nyawa yang mereka miliki
jawaban = int(input("Masukkan angka 0-10 = "))
# ini untuk menerima angka tebakan dari pemain
if jawaban == angka_rahasia:
print ("Anda Benar")
break
# ini untuk memeriksa apakah angka yang
# di masukan pemain sama dengan angka rahasia
elif nyawa-1 == limit:
print ("Anda Gagal")
break
# Jika jawabannya salah maka nyawanya akan di periksa di sini jika
# nyawanya sudah mencapai limit maka game nya akan selesai
# dan pemain akan kalah
elif jawaban > angka_rahasia:
print("Lebih")
nyawa -= 1
elif jawaban < angka_rahasia:
print("Kurang")
nyawa -= 1
# ini untuk memberikan bantuan kepada pemain apakah angka yang di masukkan
# itu lebih besar atau kurang dari angka rahasia
"""
|
normal
|
{
"blob_id": "d4b01b015723950a4d8c3453d736cd64f306d27b",
"index": 2940,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(f'Selamat datang di Game Tebak angka')\nwhile nyawa > limit:\n print(f'Percobaan anda tersisa {nyawa}')\n jawaban = int(input('Masukkan angka 0-10 = '))\n if jawaban == angka_rahasia:\n print('Anda Benar')\n break\n elif nyawa - 1 == limit:\n print('Anda Gagal')\n break\n elif jawaban > angka_rahasia:\n print('Lebih')\n nyawa -= 1\n elif jawaban < angka_rahasia:\n print('Kurang')\n nyawa -= 1\n<mask token>\n",
"step-3": "<mask token>\nnyawa = 3\nangka_rahasia = randint(0, 10)\nlimit = 0\nprint(f'Selamat datang di Game Tebak angka')\nwhile nyawa > limit:\n print(f'Percobaan anda tersisa {nyawa}')\n jawaban = int(input('Masukkan angka 0-10 = '))\n if jawaban == angka_rahasia:\n print('Anda Benar')\n break\n elif nyawa - 1 == limit:\n print('Anda Gagal')\n break\n elif jawaban > angka_rahasia:\n print('Lebih')\n nyawa -= 1\n elif jawaban < angka_rahasia:\n print('Kurang')\n nyawa -= 1\n<mask token>\n",
"step-4": "from random import randint\nnyawa = 3\nangka_rahasia = randint(0, 10)\nlimit = 0\nprint(f'Selamat datang di Game Tebak angka')\nwhile nyawa > limit:\n print(f'Percobaan anda tersisa {nyawa}')\n jawaban = int(input('Masukkan angka 0-10 = '))\n if jawaban == angka_rahasia:\n print('Anda Benar')\n break\n elif nyawa - 1 == limit:\n print('Anda Gagal')\n break\n elif jawaban > angka_rahasia:\n print('Lebih')\n nyawa -= 1\n elif jawaban < angka_rahasia:\n print('Kurang')\n nyawa -= 1\n<mask token>\n",
"step-5": "# Game Tebak Angka \r\n\r\nfrom random import randint \r\n\r\nnyawa = 3 \r\nangka_rahasia = randint(0,10) \r\nlimit = 0 \r\n\r\nprint(f\"Selamat datang di Game Tebak angka\")\r\nwhile nyawa > limit: \r\n print(f\"Percobaan anda tersisa {nyawa}\")\r\n jawaban = int(input(\"Masukkan angka 0-10 = \"))\r\n if jawaban == angka_rahasia:\r\n print (\"Anda Benar\")\r\n break \r\n elif nyawa-1 == limit:\r\n print (\"Anda Gagal\")\r\n break\r\n elif jawaban > angka_rahasia:\r\n print(\"Lebih\")\r\n nyawa -= 1\r\n elif jawaban < angka_rahasia:\r\n print(\"Kurang\")\r\n nyawa -= 1\r\n\r\n\r\n\r\n\r\n\"\"\"# Game Tebak Angka \r\nfrom random import randint \r\n# Mengimpor Library Random untuk membuat angka rahasia secara acak\r\nnyawa = 3 # Jumlah percobaan yang di berikan\r\nangka_rahasia = randint(0,10) # Angka rahasia sebagai jawaban game\r\nlimit = 0 # Batas nyawa jika nyawa jadi 0 maka pemain akan gagal\r\n\r\nprint(f\"Selamat datang di Game Tebak angka\")\r\nwhile nyawa > limit: \r\n# ini menandakan bahwa game akan berjalan \r\n# jika nyawa lebih besar dari limit\r\n print(f\"Percobaan anda tersisa {nyawa}\")\r\n# ini untuk memberitahukan pemain jumlah nyawa yang mereka miliki \r\n jawaban = int(input(\"Masukkan angka 0-10 = \"))\r\n# ini untuk menerima angka tebakan dari pemain\r\n if jawaban == angka_rahasia:\r\n print (\"Anda Benar\")\r\n break\r\n# ini untuk memeriksa apakah angka yang \r\n# di masukan pemain sama dengan angka rahasia \r\n elif nyawa-1 == limit:\r\n print (\"Anda Gagal\")\r\n break\r\n# Jika jawabannya salah maka nyawanya akan di periksa di sini jika \r\n# nyawanya sudah mencapai limit maka game nya akan selesai \r\n# dan pemain akan kalah\r\n elif jawaban > angka_rahasia:\r\n print(\"Lebih\")\r\n nyawa -= 1\r\n elif jawaban < angka_rahasia:\r\n print(\"Kurang\")\r\n nyawa -= 1\r\n# ini untuk memberikan bantuan kepada pemain apakah angka yang di masukkan\r\n# itu lebih besar atau kurang dari angka rahasia\r\n\"\"\"\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from collections import deque
'''
Big O
Calling a queue involves 2 operations: 1. dequeue 2. enqueue; how they
perform in practice depends on the implementation.
A good program: 1. works correctly 2. is responsive 3. runs within the resources of any machine (even a low-spec one)
4. does what is required 5. is stable 6. is free of bugs
"Expensive" here can mean memory expensive (uses a lot of memory)
or runtime expensive (takes a lot of time),
which is why the linked-list queue exists.
For each option:
- a linked list carries an extra cost
- dequeue is O(1): whether the queue holds 10 items (1ms) or 1 million items (still 1ms),
the speed stays constant; this is called O(1) ("oh one"), though that is the ideal case.
A linked list stores a pointer alongside each piece of data, so it uses roughly twice the memory of a list.
Big O is running-time complexity analysis used to compare which of two approaches is more efficient,
but a better Big O is not necessarily faster in practice, because speed also depends on the machine's spec.
n    T(n)
1    1ms
10   10ms
1M   1000s
T(n) grows in proportion to n: this is called O(n).
Another case:
n    T(n)
1    1
10   100
1M   1M s
T(n) proportional to n^2, n^3, or n! takes a very long time.
For example, guess which is fastest:
1. O(1)     rank 1
2. O(n)     rank 3
3. O(n^2)   rank 4
4. O(logn)  rank 2
Linking a new node into a linked list takes 3 steps: 1. create the new node 2. link the old data to the new one 3. relink the front.
Radix sort looks at the value in each digit:
1. read the input and store it in a queue
2. pull the items out one at a time
3. find the max value and how many digits it has
4. compare over 3 rounds (one per digit); in round 1, place each item pulled out into the bucket for its digit in that round, e.g. 64 has ones digit 4, so it is stored in bucket 4
'''
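# Hedged illustration of the O(1)-vs-O(n) point made above: a small timing
# helper (its name and the sizes are illustrative assumptions, not part of
# the original lesson). deque.popleft() is constant-time, while list.pop(0)
# shifts every remaining element, so the gap widens as n grows.
import timeit

def compare_pop_front(n=100_000, repeats=10_000):
    deque_time = timeit.timeit(
        'd.popleft(); d.append(0)',
        setup=f'from collections import deque; d = deque(range({n}))',
        number=repeats)
    list_time = timeit.timeit(
        'l.pop(0); l.append(0)',
        setup=f'l = list(range({n}))',
        number=repeats)
    return deque_time, list_time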
class Queue:
def __init__(self):
self.items=deque()
def enQueue(self,i):
self.items.append(i)
def deQueue(self):
return self.items.popleft()
def isEmpty(self):
return len(self.items)==0
def size(self):
return len(self.items)
'''class Queue():
def __init__(self,list=None):
if list==None:
self.items=[]
else:
self.items=list
def enQueue(self,i):
self.items.append(i)
def deQueue(self):
        return self.items.pop(0)
def isQEmpty(self):
return len(self.items)==0
def size(self):
return len(self.items)
'''
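# A minimal sketch of the radix sort described in the docstring above, built
# on the Queue class defined here. The function name and the assumption of
# non-negative integers are mine, not the lesson's.
def radix_sort(numbers):
    main_q = Queue()
    for n in numbers:
        main_q.enQueue(n)            # 1. store the input in a queue
    if main_q.isEmpty():
        return []
    digits = len(str(max(numbers)))  # 3. the max value decides the round count
    for d in range(digits):
        buckets = [Queue() for _ in range(10)]
        while not main_q.isEmpty():  # 2. pull items out one at a time
            n = main_q.deQueue()
            # 4. drop each number into the bucket for its current digit,
            #    e.g. 64 goes to bucket 4 in the ones round
            buckets[(n // 10 ** d) % 10].enQueue(n)
        for b in buckets:            # collect back in bucket order
            while not b.isEmpty():
                main_q.enQueue(b.deQueue())
    return [main_q.deQueue() for _ in range(main_q.size())]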
if __name__== '__main__':
q=Queue()
print(q.items)
q.enQueue('A')
print(q.items)
q.deQueue()
print(q.items)
print(q.isEmpty())
|
normal
|
{
"blob_id": "c96a64573fc6cc207ee09be4f4b183d065736ff6",
"index": 5442,
"step-1": "<mask token>\n\n\nclass Queue:\n\n def __init__(self):\n self.items = deque()\n\n def enQueue(self, i):\n self.items.append(i)\n\n def deQueue(self):\n return self.items.popleft()\n\n def isEmpty(self):\n return len(self.items) == 0\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Queue:\n\n def __init__(self):\n self.items = deque()\n\n def enQueue(self, i):\n self.items.append(i)\n\n def deQueue(self):\n return self.items.popleft()\n\n def isEmpty(self):\n return len(self.items) == 0\n\n def size(self):\n return len(self.items)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Queue:\n\n def __init__(self):\n self.items = deque()\n\n def enQueue(self, i):\n self.items.append(i)\n\n def deQueue(self):\n return self.items.popleft()\n\n def isEmpty(self):\n return len(self.items) == 0\n\n def size(self):\n return len(self.items)\n\n\n<mask token>\nif __name__ == '__main__':\n q = Queue()\n print(q.items)\n q.enQueue('A')\n print(q.items)\n q.deQueue()\n print(q.items)\n print(q.isEmpty())\n",
"step-4": "from collections import deque\n<mask token>\n\n\nclass Queue:\n\n def __init__(self):\n self.items = deque()\n\n def enQueue(self, i):\n self.items.append(i)\n\n def deQueue(self):\n return self.items.popleft()\n\n def isEmpty(self):\n return len(self.items) == 0\n\n def size(self):\n return len(self.items)\n\n\n<mask token>\nif __name__ == '__main__':\n q = Queue()\n print(q.items)\n q.enQueue('A')\n print(q.items)\n q.deQueue()\n print(q.items)\n print(q.isEmpty())\n",
"step-5": "from collections import deque\r\n'''\r\nBig O \r\nเวลาเรียก queue จะมี2operation 1deque 2enqueue เวลาเอาไปใช้\r\nอยู่ที่การimplementation\r\nโปรแกรมที่ดี 1.ทำงานถูกต้อง 2.ทันใจ 3.ทรัพยากรที่ใช้รันได้ทุกเครื่อง(specคอมกาก)\r\n4.ทำงานได้ตามต้องการ5.ความเสถียรของระบบ 6.Bugs\r\n\r\nแพง คือ memory expansive ใช้หน่วยความจำเยอะ\r\n\truntime expensive ใช้เวลาเยอะ\r\nเลยเกิด queue linklist\r\nโดยแต่ละอย่าง\r\n- linklist มีcost มาเกี่ยว \r\n- dequeue ใช้ O(1) มันขึ้นกับว่าจำหน่วยตัวข้างในมี10ตัวใช้ 1ms 1ล้านตัวก็ใช้ 1ms\r\nเรียกความเร็วคงที่ อีกชื่อ O(1) โอวัน โอหนึ่ง แต่มันในอุดมคติ\r\nเวลาใใช้ linklist เก็บตัวชี้และ ข้อมูล มันเลยใช้ หน่วยความจำเป็น2เท่าของ list\r\n\r\nBig O คือการวิเคราะห์ runing time complexityเปรียบเทียบสองตัวว่าตัวไหนมีประสิทธิภาพดีกว่า\r\nแต่Big O ที่ดีกว่าไม่ได้เร็วกว่า เพราะ ขึ้นอยุกับ ความเร็วspecเครื่อง\r\nn T(n)\r\n1 1ms\r\n10 10ms\r\n1M 1000s\r\nT(N)ผันตามn เรียก O(n)\r\nอีกเคส\r\nn T(n)\r\n1 1\r\n10 100\r\n1M 1Ms\r\nT(N) ผันตาม n^2,n^3,n! จะใช้เวลาเยอะมาก\r\n\r\nเช่น ให้ทาย อันไหนเร็วสุด\r\n1. O(1)\tอันดับ1\r\n2. O(n)\tอันดับ3\r\n3. O(n^2)\tอันดับ4\r\n4. O(logn)\tอันดับ2\r\n\r\n\r\n\r\nเวลาใช้ linklist จะมี3ขั้นตอนในการเชื่อม 1.สร้างnodeใหม่ 2.ลิ้งข้อมูลอันเก่ากะอันใหม่ 3.ลิ้งส่วนfront\r\n\r\nradix sort ดูค่าในแต่ละหลัก\r\n1.รับ input เก็บไว้ในqueue\r\n2.หยิบตัวแรกออกไป\r\n3.มันจะหาว่าตัวไหนmax และมีกี่หลัก\r\n4.จะมีการเทียบ3รอบ รอบที่1 เอาข้อมูลที่ดึงออกมา เก็บไว้ตามหลักในรอบนั้นๆเช่น 64 เลขหลักหน่วยตรงกับหลัก4 ก้เก่บไว้ที่4\r\n'''\r\nclass Queue:\r\n def __init__(self):\r\n self.items=deque()\r\n def enQueue(self,i):\r\n self.items.append(i)\r\n def deQueue(self):\r\n return self.items.popleft()\r\n def isEmpty(self):\r\n return len(self.items)==0\r\n def size(self):\r\n return len(self.items)\r\n'''class Queue(): \r\n def __init__(self,list=None):\r\n if list==None:\r\n self.items=[]\r\n else:\r\n self.items=list\r\n \r\n def enQueue(self,i):\r\n self.items.append(i)\r\n def deQueue(self):\r\n self.items.pop(0)\r\n def isQEmpty(self):\r\n return len(self.items)==0\r\n def size(self):\r\n return len(self.items)\r\n'''\r\nif __name__== '__main__':\r\n q=Queue()\r\n print(q.items)\r\n q.enQueue('A')\r\n print(q.items)\r\n q.deQueue()\r\n print(q.items)\r\n print(q.isEmpty())\r\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('users', '0004_auto_20200720_0154')]
operations = [migrations.DeleteModel(name='Report'), migrations.
AlterField(model_name='registered', name='Email', field=models.
EmailField(max_length=254, null=True)), migrations.AlterField(
model_name='registered', name='Password', field=models.CharField(
max_length=50, null=True)), migrations.AlterField(model_name=
'registered', name='Username', field=models.CharField(max_length=70,
null=True))]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('users', '0004_auto_20200720_0154')]
operations = [migrations.DeleteModel(name='Report'), migrations.
AlterField(model_name='registered', name='Email', field=models.
EmailField(max_length=254, null=True)), migrations.AlterField(
model_name='registered', name='Password', field=models.CharField(
max_length=50, null=True)), migrations.AlterField(model_name=
'registered', name='Username', field=models.CharField(max_length=70,
null=True))]
<|reserved_special_token_1|>
# Generated by Django 3.0.4 on 2020-07-20 00:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0004_auto_20200720_0154'),
]
operations = [
migrations.DeleteModel(
name='Report',
),
migrations.AlterField(
model_name='registered',
name='Email',
field=models.EmailField(max_length=254, null=True),
),
migrations.AlterField(
model_name='registered',
name='Password',
field=models.CharField(max_length=50, null=True),
),
migrations.AlterField(
model_name='registered',
name='Username',
field=models.CharField(max_length=70, null=True),
),
]
|
flexible
|
{
"blob_id": "98bc6e0552991d7de1cc29a02242b25e7919ef82",
"index": 3764,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('users', '0004_auto_20200720_0154')]\n operations = [migrations.DeleteModel(name='Report'), migrations.\n AlterField(model_name='registered', name='Email', field=models.\n EmailField(max_length=254, null=True)), migrations.AlterField(\n model_name='registered', name='Password', field=models.CharField(\n max_length=50, null=True)), migrations.AlterField(model_name=\n 'registered', name='Username', field=models.CharField(max_length=70,\n null=True))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('users', '0004_auto_20200720_0154')]\n operations = [migrations.DeleteModel(name='Report'), migrations.\n AlterField(model_name='registered', name='Email', field=models.\n EmailField(max_length=254, null=True)), migrations.AlterField(\n model_name='registered', name='Password', field=models.CharField(\n max_length=50, null=True)), migrations.AlterField(model_name=\n 'registered', name='Username', field=models.CharField(max_length=70,\n null=True))]\n",
"step-5": "# Generated by Django 3.0.4 on 2020-07-20 00:05\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('users', '0004_auto_20200720_0154'),\n ]\n\n operations = [\n migrations.DeleteModel(\n name='Report',\n ),\n migrations.AlterField(\n model_name='registered',\n name='Email',\n field=models.EmailField(max_length=254, null=True),\n ),\n migrations.AlterField(\n model_name='registered',\n name='Password',\n field=models.CharField(max_length=50, null=True),\n ),\n migrations.AlterField(\n model_name='registered',\n name='Username',\n field=models.CharField(max_length=70, null=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class NetworkLookup:
def __init__(self):
self.loaded = 0
self.subnets = {}
self.vpcs = {}
def load(self):
if self.loaded:
return
client = boto3.client('ec2')
subnets_r = client.describe_subnets()
subnets_list = subnets_r['Subnets']
while 'NextToken' in subnets_r:
            subnets_r = client.describe_subnets(NextToken=subnets_r['NextToken'])
subnets_list.extend(subnets_r['Subnets'])
for subnet in subnets_list:
name = None
if 'Tags' in subnet:
for tag in subnet['Tags']:
if tag['Key'] == 'Name':
name = tag['Value']
if name is not None:
self.subnets[name] = subnet['SubnetId']
vpcs_r = client.describe_vpcs()
vpcs_list = vpcs_r['Vpcs']
while 'NextToken' in vpcs_r:
vpcs_r = client.describe_vpcs(NextToken=vpcs_r['NextToken'])
            vpcs_list.extend(vpcs_r['Vpcs'])
for vpc in vpcs_list:
name = None
if 'Tags' in vpc:
for tag in vpc['Tags']:
if tag['Key'] == 'Name':
name = tag['Value']
if name is not None:
                self.vpcs[name] = vpc['VpcId']
        self.loaded = 1
def get_subnets(self, environment_name, subnetname):
self.load()
return list(map(lambda x: self.subnets[x], filter(lambda x: x.
startswith(f'{environment_name}{subnetname}'), self.subnets)))
<|reserved_special_token_0|>
def replace_vpc(value, parameters):
if isinstance(value, str) and value.startswith('CfHl.Vpc'):
nl.load()
parts = value.split('.')
environment_name = parameters['EnvironmentName']
if len(parts) == 3:
prop = parts[2]
if prop == 'Id':
vpcs = nl.vpcs
if f'{environment_name}-vpc' in vpcs:
return vpcs[f'{environment_name}-vpc']
return value
def replace_network(value, parameters):
value = replace_subnets(value, parameters)
value = replace_vpc(value, parameters)
return value
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class NetworkLookup:
def __init__(self):
self.loaded = 0
self.subnets = {}
self.vpcs = {}
def load(self):
if self.loaded:
return
client = boto3.client('ec2')
subnets_r = client.describe_subnets()
subnets_list = subnets_r['Subnets']
while 'NextToken' in subnets_r:
            subnets_r = client.describe_subnets(NextToken=subnets_r['NextToken'])
subnets_list.extend(subnets_r['Subnets'])
for subnet in subnets_list:
name = None
if 'Tags' in subnet:
for tag in subnet['Tags']:
if tag['Key'] == 'Name':
name = tag['Value']
if name is not None:
self.subnets[name] = subnet['SubnetId']
vpcs_r = client.describe_vpcs()
vpcs_list = vpcs_r['Vpcs']
while 'NextToken' in vpcs_r:
vpcs_r = client.describe_vpcs(NextToken=vpcs_r['NextToken'])
            vpcs_list.extend(vpcs_r['Vpcs'])
for vpc in vpcs_list:
name = None
if 'Tags' in vpc:
for tag in vpc['Tags']:
if tag['Key'] == 'Name':
name = tag['Value']
if name is not None:
                self.vpcs[name] = vpc['VpcId']
        self.loaded = 1
def get_subnets(self, environment_name, subnetname):
self.load()
return list(map(lambda x: self.subnets[x], filter(lambda x: x.
startswith(f'{environment_name}{subnetname}'), self.subnets)))
<|reserved_special_token_0|>
def replace_subnets(value, parameters):
if isinstance(value, str) and value.startswith('CfHl.Subnet'):
parts = value.split('.')
if len(parts) == 3:
subnet_class = parts[2]
environment_name = parameters['EnvironmentName']
subnets = nl.get_subnets(environment_name, subnet_class)
if parts[1] == 'Subnets':
return subnets
elif parts[1] == 'Subnet':
if subnets:
return subnets[0]
return value
def replace_vpc(value, parameters):
if isinstance(value, str) and value.startswith('CfHl.Vpc'):
nl.load()
parts = value.split('.')
environment_name = parameters['EnvironmentName']
if len(parts) == 3:
prop = parts[2]
if prop == 'Id':
vpcs = nl.vpcs
if f'{environment_name}-vpc' in vpcs:
return vpcs[f'{environment_name}-vpc']
return value
def replace_network(value, parameters):
value = replace_subnets(value, parameters)
value = replace_vpc(value, parameters)
return value
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class NetworkLookup:
def __init__(self):
self.loaded = 0
self.subnets = {}
self.vpcs = {}
def load(self):
if self.loaded:
return
client = boto3.client('ec2')
subnets_r = client.describe_subnets()
subnets_list = subnets_r['Subnets']
while 'NextToken' in subnets_r:
            subnets_r = client.describe_subnets(NextToken=subnets_r['NextToken'])
subnets_list.extend(subnets_r['Subnets'])
for subnet in subnets_list:
name = None
if 'Tags' in subnet:
for tag in subnet['Tags']:
if tag['Key'] == 'Name':
name = tag['Value']
if name is not None:
self.subnets[name] = subnet['SubnetId']
vpcs_r = client.describe_vpcs()
vpcs_list = vpcs_r['Vpcs']
while 'NextToken' in vpcs_r:
vpcs_r = client.describe_vpcs(NextToken=vpcs_r['NextToken'])
            vpcs_list.extend(vpcs_r['Vpcs'])
for vpc in vpcs_list:
name = None
if 'Tags' in vpc:
for tag in vpc['Tags']:
if tag['Key'] == 'Name':
name = tag['Value']
if name is not None:
                self.vpcs[name] = vpc['VpcId']
        self.loaded = 1
def get_subnets(self, environment_name, subnetname):
self.load()
return list(map(lambda x: self.subnets[x], filter(lambda x: x.
startswith(f'{environment_name}{subnetname}'), self.subnets)))
nl = NetworkLookup()
def replace_subnets(value, parameters):
if isinstance(value, str) and value.startswith('CfHl.Subnet'):
parts = value.split('.')
if len(parts) == 3:
subnet_class = parts[2]
environment_name = parameters['EnvironmentName']
subnets = nl.get_subnets(environment_name, subnet_class)
if parts[1] == 'Subnets':
return subnets
elif parts[1] == 'Subnet':
if subnets:
return subnets[0]
return value
def replace_vpc(value, parameters):
if isinstance(value, str) and value.startswith('CfHl.Vpc'):
nl.load()
parts = value.split('.')
environment_name = parameters['EnvironmentName']
if len(parts) == 3:
prop = parts[2]
if prop == 'Id':
vpcs = nl.vpcs
if f'{environment_name}-vpc' in vpcs:
return vpcs[f'{environment_name}-vpc']
return value
def replace_network(value, parameters):
value = replace_subnets(value, parameters)
value = replace_vpc(value, parameters)
return value
if __name__ == '__main__':
print(replace_network('CfHl.Subnets.Public', {'EnvironmentName': 'dev'}))
print(replace_network('CfHl.Subnet.Public0', {'EnvironmentName': 'dev'}))
print(replace_network('CfHl.Vpc.Id', {'EnvironmentName': 'dev'}))
<|reserved_special_token_1|>
import boto3
class NetworkLookup:
def __init__(self):
self.loaded = 0
self.subnets = {}
self.vpcs = {}
def load(self):
if self.loaded:
return
client = boto3.client('ec2')
subnets_r = client.describe_subnets()
subnets_list = subnets_r['Subnets']
while 'NextToken' in subnets_r:
            subnets_r = client.describe_subnets(NextToken=subnets_r['NextToken'])
subnets_list.extend(subnets_r['Subnets'])
for subnet in subnets_list:
name = None
if 'Tags' in subnet:
for tag in subnet['Tags']:
if tag['Key'] == 'Name':
name = tag['Value']
if name is not None:
self.subnets[name] = subnet['SubnetId']
vpcs_r = client.describe_vpcs()
vpcs_list = vpcs_r['Vpcs']
while 'NextToken' in vpcs_r:
vpcs_r = client.describe_vpcs(NextToken=vpcs_r['NextToken'])
            vpcs_list.extend(vpcs_r['Vpcs'])
for vpc in vpcs_list:
name = None
if 'Tags' in vpc:
for tag in vpc['Tags']:
if tag['Key'] == 'Name':
name = tag['Value']
if name is not None:
                self.vpcs[name] = vpc['VpcId']
        self.loaded = 1
def get_subnets(self, environment_name, subnetname):
self.load()
return list(map(lambda x: self.subnets[x], filter(lambda x: x.
startswith(f'{environment_name}{subnetname}'), self.subnets)))
nl = NetworkLookup()
def replace_subnets(value, parameters):
if isinstance(value, str) and value.startswith('CfHl.Subnet'):
parts = value.split('.')
if len(parts) == 3:
subnet_class = parts[2]
environment_name = parameters['EnvironmentName']
subnets = nl.get_subnets(environment_name, subnet_class)
if parts[1] == 'Subnets':
return subnets
elif parts[1] == 'Subnet':
if subnets:
return subnets[0]
return value
def replace_vpc(value, parameters):
if isinstance(value, str) and value.startswith('CfHl.Vpc'):
nl.load()
parts = value.split('.')
environment_name = parameters['EnvironmentName']
if len(parts) == 3:
prop = parts[2]
if prop == 'Id':
vpcs = nl.vpcs
if f'{environment_name}-vpc' in vpcs:
return vpcs[f'{environment_name}-vpc']
return value
def replace_network(value, parameters):
value = replace_subnets(value, parameters)
value = replace_vpc(value, parameters)
return value
if __name__ == '__main__':
print(replace_network('CfHl.Subnets.Public', {'EnvironmentName': 'dev'}))
print(replace_network('CfHl.Subnet.Public0', {'EnvironmentName': 'dev'}))
print(replace_network('CfHl.Vpc.Id', {'EnvironmentName': 'dev'}))
<|reserved_special_token_1|>
import boto3
class NetworkLookup:
def __init__(self):
self.loaded = 0
self.subnets = {}
self.vpcs = {}
def load(self):
if self.loaded:
return
client = boto3.client('ec2')
# load subnets
subnets_r = client.describe_subnets()
subnets_list = subnets_r['Subnets']
while 'NextToken' in subnets_r:
            subnets_r = client.describe_subnets(NextToken=subnets_r['NextToken'])
subnets_list.extend(subnets_r['Subnets'])
for subnet in subnets_list:
name = None
if 'Tags' in subnet:
for tag in subnet['Tags']:
if tag['Key'] == 'Name':
name = tag['Value']
if name is not None:
self.subnets[name] = subnet['SubnetId']
# load vpcs
vpcs_r = client.describe_vpcs()
vpcs_list = vpcs_r['Vpcs']
while 'NextToken' in vpcs_r:
vpcs_r = client.describe_vpcs(NextToken=vpcs_r['NextToken'])
            vpcs_list.extend(vpcs_r['Vpcs'])
for vpc in vpcs_list:
name = None
if 'Tags' in vpc:
for tag in vpc['Tags']:
if tag['Key'] == 'Name':
name = tag['Value']
if name is not None:
                self.vpcs[name] = vpc['VpcId']
        self.loaded = 1
def get_subnets(self, environment_name, subnetname):
self.load()
return list(map( lambda x: self.subnets[x] ,
filter(lambda x: x.startswith(f"{environment_name}{subnetname}"), self.subnets)
))
nl = NetworkLookup()
def replace_subnets(value, parameters):
if isinstance(value, str) and value.startswith('CfHl.Subnet'):
parts = value.split('.')
if len(parts) == 3:
subnet_class = parts[2]
environment_name = parameters['EnvironmentName']
subnets = nl.get_subnets(environment_name, subnet_class)
if parts[1] == 'Subnets':
return subnets
elif parts[1] == 'Subnet':
if subnets:
return subnets[0]
return value
def replace_vpc(value, parameters):
if isinstance(value, str) and value.startswith('CfHl.Vpc'):
nl.load()
parts = value.split('.')
environment_name = parameters['EnvironmentName']
if len(parts) == 3:
prop = parts[2]
if prop == 'Id':
vpcs = nl.vpcs
if f"{environment_name}-vpc" in vpcs:
return vpcs[f"{environment_name}-vpc"]
return value
def replace_network(value, parameters):
value = replace_subnets(value, parameters)
value = replace_vpc(value, parameters)
return value
if __name__ == '__main__':
print(replace_network('CfHl.Subnets.Public',{'EnvironmentName':'dev'}))
print(replace_network('CfHl.Subnet.Public0',{'EnvironmentName':'dev'}))
print(replace_network('CfHl.Vpc.Id',{'EnvironmentName':'dev'}))
|
flexible
|
{
"blob_id": "767c0e6d956701fcedddb153b6c47f404dec535a",
"index": 65,
"step-1": "<mask token>\n\n\nclass NetworkLookup:\n\n def __init__(self):\n self.loaded = 0\n self.subnets = {}\n self.vpcs = {}\n\n def load(self):\n if self.loaded:\n return\n client = boto3.client('ec2')\n subnets_r = client.describe_subnets()\n subnets_list = subnets_r['Subnets']\n while 'NextToken' in subnets_r:\n subnets_r = client.get_subnets(NextToken=subnets_r['NextToken'])\n subnets_list.extend(subnets_r['Subnets'])\n for subnet in subnets_list:\n name = None\n if 'Tags' in subnet:\n for tag in subnet['Tags']:\n if tag['Key'] == 'Name':\n name = tag['Value']\n if name is not None:\n self.subnets[name] = subnet['SubnetId']\n vpcs_r = client.describe_vpcs()\n vpcs_list = vpcs_r['Vpcs']\n while 'NextToken' in vpcs_r:\n vpcs_r = client.describe_vpcs(NextToken=vpcs_r['NextToken'])\n vpcs_list.extend(vpcs_r['Subnets'])\n for vpc in vpcs_list:\n name = None\n if 'Tags' in vpc:\n for tag in vpc['Tags']:\n if tag['Key'] == 'Name':\n name = tag['Value']\n if name is not None:\n self.vpcs[name] = vpc['VpcId']\n\n def get_subnets(self, environment_name, subnetname):\n self.load()\n return list(map(lambda x: self.subnets[x], filter(lambda x: x.\n startswith(f'{environment_name}{subnetname}'), self.subnets)))\n\n\n<mask token>\n\n\ndef replace_vpc(value, parameters):\n if isinstance(value, str) and value.startswith('CfHl.Vpc'):\n nl.load()\n parts = value.split('.')\n environment_name = parameters['EnvironmentName']\n if len(parts) == 3:\n prop = parts[2]\n if prop == 'Id':\n vpcs = nl.vpcs\n if f'{environment_name}-vpc' in vpcs:\n return vpcs[f'{environment_name}-vpc']\n return value\n\n\ndef replace_network(value, parameters):\n value = replace_subnets(value, parameters)\n value = replace_vpc(value, parameters)\n return value\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass NetworkLookup:\n\n def __init__(self):\n self.loaded = 0\n self.subnets = {}\n self.vpcs = {}\n\n def load(self):\n if self.loaded:\n return\n client = boto3.client('ec2')\n subnets_r = client.describe_subnets()\n subnets_list = subnets_r['Subnets']\n while 'NextToken' in subnets_r:\n subnets_r = client.get_subnets(NextToken=subnets_r['NextToken'])\n subnets_list.extend(subnets_r['Subnets'])\n for subnet in subnets_list:\n name = None\n if 'Tags' in subnet:\n for tag in subnet['Tags']:\n if tag['Key'] == 'Name':\n name = tag['Value']\n if name is not None:\n self.subnets[name] = subnet['SubnetId']\n vpcs_r = client.describe_vpcs()\n vpcs_list = vpcs_r['Vpcs']\n while 'NextToken' in vpcs_r:\n vpcs_r = client.describe_vpcs(NextToken=vpcs_r['NextToken'])\n vpcs_list.extend(vpcs_r['Subnets'])\n for vpc in vpcs_list:\n name = None\n if 'Tags' in vpc:\n for tag in vpc['Tags']:\n if tag['Key'] == 'Name':\n name = tag['Value']\n if name is not None:\n self.vpcs[name] = vpc['VpcId']\n\n def get_subnets(self, environment_name, subnetname):\n self.load()\n return list(map(lambda x: self.subnets[x], filter(lambda x: x.\n startswith(f'{environment_name}{subnetname}'), self.subnets)))\n\n\n<mask token>\n\n\ndef replace_subnets(value, parameters):\n if isinstance(value, str) and value.startswith('CfHl.Subnet'):\n parts = value.split('.')\n if len(parts) == 3:\n subnet_class = parts[2]\n environment_name = parameters['EnvironmentName']\n subnets = nl.get_subnets(environment_name, subnet_class)\n if parts[1] == 'Subnets':\n return subnets\n elif parts[1] == 'Subnet':\n if subnets:\n return subnets[0]\n return value\n\n\ndef replace_vpc(value, parameters):\n if isinstance(value, str) and value.startswith('CfHl.Vpc'):\n nl.load()\n parts = value.split('.')\n environment_name = parameters['EnvironmentName']\n if len(parts) == 3:\n prop = parts[2]\n if prop == 'Id':\n vpcs = nl.vpcs\n if f'{environment_name}-vpc' in vpcs:\n return vpcs[f'{environment_name}-vpc']\n return value\n\n\ndef replace_network(value, parameters):\n value = replace_subnets(value, parameters)\n value = replace_vpc(value, parameters)\n return value\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass NetworkLookup:\n\n def __init__(self):\n self.loaded = 0\n self.subnets = {}\n self.vpcs = {}\n\n def load(self):\n if self.loaded:\n return\n client = boto3.client('ec2')\n subnets_r = client.describe_subnets()\n subnets_list = subnets_r['Subnets']\n while 'NextToken' in subnets_r:\n subnets_r = client.get_subnets(NextToken=subnets_r['NextToken'])\n subnets_list.extend(subnets_r['Subnets'])\n for subnet in subnets_list:\n name = None\n if 'Tags' in subnet:\n for tag in subnet['Tags']:\n if tag['Key'] == 'Name':\n name = tag['Value']\n if name is not None:\n self.subnets[name] = subnet['SubnetId']\n vpcs_r = client.describe_vpcs()\n vpcs_list = vpcs_r['Vpcs']\n while 'NextToken' in vpcs_r:\n vpcs_r = client.describe_vpcs(NextToken=vpcs_r['NextToken'])\n vpcs_list.extend(vpcs_r['Subnets'])\n for vpc in vpcs_list:\n name = None\n if 'Tags' in vpc:\n for tag in vpc['Tags']:\n if tag['Key'] == 'Name':\n name = tag['Value']\n if name is not None:\n self.vpcs[name] = vpc['VpcId']\n\n def get_subnets(self, environment_name, subnetname):\n self.load()\n return list(map(lambda x: self.subnets[x], filter(lambda x: x.\n startswith(f'{environment_name}{subnetname}'), self.subnets)))\n\n\nnl = NetworkLookup()\n\n\ndef replace_subnets(value, parameters):\n if isinstance(value, str) and value.startswith('CfHl.Subnet'):\n parts = value.split('.')\n if len(parts) == 3:\n subnet_class = parts[2]\n environment_name = parameters['EnvironmentName']\n subnets = nl.get_subnets(environment_name, subnet_class)\n if parts[1] == 'Subnets':\n return subnets\n elif parts[1] == 'Subnet':\n if subnets:\n return subnets[0]\n return value\n\n\ndef replace_vpc(value, parameters):\n if isinstance(value, str) and value.startswith('CfHl.Vpc'):\n nl.load()\n parts = value.split('.')\n environment_name = parameters['EnvironmentName']\n if len(parts) == 3:\n prop = parts[2]\n if prop == 'Id':\n vpcs = nl.vpcs\n if f'{environment_name}-vpc' in vpcs:\n return vpcs[f'{environment_name}-vpc']\n return value\n\n\ndef replace_network(value, parameters):\n value = replace_subnets(value, parameters)\n value = replace_vpc(value, parameters)\n return value\n\n\nif __name__ == '__main__':\n print(replace_network('CfHl.Subnets.Public', {'EnvironmentName': 'dev'}))\n print(replace_network('CfHl.Subnet.Public0', {'EnvironmentName': 'dev'}))\n print(replace_network('CfHl.Vpc.Id', {'EnvironmentName': 'dev'}))\n",
"step-4": "import boto3\n\n\nclass NetworkLookup:\n\n def __init__(self):\n self.loaded = 0\n self.subnets = {}\n self.vpcs = {}\n\n def load(self):\n if self.loaded:\n return\n client = boto3.client('ec2')\n subnets_r = client.describe_subnets()\n subnets_list = subnets_r['Subnets']\n while 'NextToken' in subnets_r:\n subnets_r = client.get_subnets(NextToken=subnets_r['NextToken'])\n subnets_list.extend(subnets_r['Subnets'])\n for subnet in subnets_list:\n name = None\n if 'Tags' in subnet:\n for tag in subnet['Tags']:\n if tag['Key'] == 'Name':\n name = tag['Value']\n if name is not None:\n self.subnets[name] = subnet['SubnetId']\n vpcs_r = client.describe_vpcs()\n vpcs_list = vpcs_r['Vpcs']\n while 'NextToken' in vpcs_r:\n vpcs_r = client.describe_vpcs(NextToken=vpcs_r['NextToken'])\n vpcs_list.extend(vpcs_r['Subnets'])\n for vpc in vpcs_list:\n name = None\n if 'Tags' in vpc:\n for tag in vpc['Tags']:\n if tag['Key'] == 'Name':\n name = tag['Value']\n if name is not None:\n self.vpcs[name] = vpc['VpcId']\n\n def get_subnets(self, environment_name, subnetname):\n self.load()\n return list(map(lambda x: self.subnets[x], filter(lambda x: x.\n startswith(f'{environment_name}{subnetname}'), self.subnets)))\n\n\nnl = NetworkLookup()\n\n\ndef replace_subnets(value, parameters):\n if isinstance(value, str) and value.startswith('CfHl.Subnet'):\n parts = value.split('.')\n if len(parts) == 3:\n subnet_class = parts[2]\n environment_name = parameters['EnvironmentName']\n subnets = nl.get_subnets(environment_name, subnet_class)\n if parts[1] == 'Subnets':\n return subnets\n elif parts[1] == 'Subnet':\n if subnets:\n return subnets[0]\n return value\n\n\ndef replace_vpc(value, parameters):\n if isinstance(value, str) and value.startswith('CfHl.Vpc'):\n nl.load()\n parts = value.split('.')\n environment_name = parameters['EnvironmentName']\n if len(parts) == 3:\n prop = parts[2]\n if prop == 'Id':\n vpcs = nl.vpcs\n if f'{environment_name}-vpc' in vpcs:\n return vpcs[f'{environment_name}-vpc']\n return value\n\n\ndef replace_network(value, parameters):\n value = replace_subnets(value, parameters)\n value = replace_vpc(value, parameters)\n return value\n\n\nif __name__ == '__main__':\n print(replace_network('CfHl.Subnets.Public', {'EnvironmentName': 'dev'}))\n print(replace_network('CfHl.Subnet.Public0', {'EnvironmentName': 'dev'}))\n print(replace_network('CfHl.Vpc.Id', {'EnvironmentName': 'dev'}))\n",
"step-5": "import boto3\n\nclass NetworkLookup:\n\n def __init__(self):\n self.loaded = 0\n self.subnets = {}\n self.vpcs = {}\n\n def load(self):\n if self.loaded:\n return\n\n client = boto3.client('ec2')\n # load subnets\n subnets_r = client.describe_subnets()\n subnets_list = subnets_r['Subnets']\n while 'NextToken' in subnets_r:\n subnets_r = client.get_subnets(NextToken=subnets_r['NextToken'])\n subnets_list.extend(subnets_r['Subnets'])\n\n for subnet in subnets_list:\n name = None\n if 'Tags' in subnet:\n for tag in subnet['Tags']:\n if tag['Key'] == 'Name':\n name = tag['Value']\n if name is not None:\n self.subnets[name] = subnet['SubnetId']\n\n # load vpcs\n vpcs_r = client.describe_vpcs()\n vpcs_list = vpcs_r['Vpcs']\n while 'NextToken' in vpcs_r:\n vpcs_r = client.describe_vpcs(NextToken=vpcs_r['NextToken'])\n vpcs_list.extend(vpcs_r['Subnets'])\n for vpc in vpcs_list:\n name = None\n if 'Tags' in vpc:\n for tag in vpc['Tags']:\n if tag['Key'] == 'Name':\n name = tag['Value']\n if name is not None:\n self.vpcs[name] = vpc['VpcId']\n\n def get_subnets(self, environment_name, subnetname):\n self.load()\n return list(map( lambda x: self.subnets[x] ,\n filter(lambda x: x.startswith(f\"{environment_name}{subnetname}\"), self.subnets)\n ))\n\nnl = NetworkLookup()\n\ndef replace_subnets(value, parameters):\n if isinstance(value, str) and value.startswith('CfHl.Subnet'):\n parts = value.split('.')\n if len(parts) == 3:\n subnet_class = parts[2]\n environment_name = parameters['EnvironmentName']\n subnets = nl.get_subnets(environment_name, subnet_class)\n if parts[1] == 'Subnets':\n return subnets\n elif parts[1] == 'Subnet':\n if subnets:\n return subnets[0]\n return value\n\ndef replace_vpc(value, parameters):\n if isinstance(value, str) and value.startswith('CfHl.Vpc'):\n nl.load()\n parts = value.split('.')\n environment_name = parameters['EnvironmentName']\n if len(parts) == 3:\n prop = parts[2]\n if prop == 'Id':\n vpcs = nl.vpcs\n if f\"{environment_name}-vpc\" in vpcs:\n return vpcs[f\"{environment_name}-vpc\"]\n return value\n\ndef replace_network(value, parameters):\n value = replace_subnets(value, parameters)\n value = replace_vpc(value, parameters)\n return value\n\nif __name__ == '__main__':\n print(replace_network('CfHl.Subnets.Public',{'EnvironmentName':'dev'}))\n print(replace_network('CfHl.Subnet.Public0',{'EnvironmentName':'dev'}))\n print(replace_network('CfHl.Vpc.Id',{'EnvironmentName':'dev'}))\n",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(' Guess a number between 0 and 100')
<|reserved_special_token_0|>
while condition != 1:
counter += 1
if condition == 0:
last = middle
elif condition == 2:
start = middle
middle = int((start + last) / 2)
condition = int(input('Is your guess ' + str(middle) +
"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) "
))
print('It took us {} guesses to get it right! Cheers!'.format(counter))
<|reserved_special_token_1|>
start = 0
last = 100
middle = 50
counter = 1
print(' Guess a number between 0 and 100')
condition = int(input('Is your guess ' + str(middle) +
"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) "
))
while condition != 1:
counter += 1
if condition == 0:
last = middle
elif condition == 2:
start = middle
middle = int((start + last) / 2)
condition = int(input('Is your guess ' + str(middle) +
"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) "
))
print('It took us {} guesses to get it right! Cheers!'.format(counter))
<|reserved_special_token_1|>
start=0
last=100
middle=50
counter=1
print(" Guess a number between 0 and 100")
condition = int(input("Is your guess " + str(middle) + "? (0 means it's too low, 1 means it's your guess and 2 means it's too high) "))
while condition != 1:
counter += 1
if condition == 0:
last = middle
elif condition == 2:
start = middle
middle=int((start+last)/2)
condition = int(input("Is your guess " + str(middle) + "? (0 means it's too low, 1 means it's your guess and 2 means it's too high) "))
print("It took us {} guesses to get it right! Cheers!".format(counter))
|
flexible
|
{
"blob_id": "42d03aabef7d75c813f30bb6d8a835d76fd1fc83",
"index": 603,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(' Guess a number between 0 and 100')\n<mask token>\nwhile condition != 1:\n counter += 1\n if condition == 0:\n last = middle\n elif condition == 2:\n start = middle\n middle = int((start + last) / 2)\n condition = int(input('Is your guess ' + str(middle) +\n \"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) \"\n ))\nprint('It took us {} guesses to get it right! Cheers!'.format(counter))\n",
"step-3": "start = 0\nlast = 100\nmiddle = 50\ncounter = 1\nprint(' Guess a number between 0 and 100')\ncondition = int(input('Is your guess ' + str(middle) +\n \"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) \"\n ))\nwhile condition != 1:\n counter += 1\n if condition == 0:\n last = middle\n elif condition == 2:\n start = middle\n middle = int((start + last) / 2)\n condition = int(input('Is your guess ' + str(middle) +\n \"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) \"\n ))\nprint('It took us {} guesses to get it right! Cheers!'.format(counter))\n",
"step-4": "start=0\nlast=100\nmiddle=50\ncounter=1\n\nprint(\" Guess a number between 0 and 100\")\ncondition = int(input(\"Is your guess \" + str(middle) + \"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) \"))\n\nwhile condition != 1:\n counter += 1\n\n if condition == 0:\n last = middle\n elif condition == 2:\n start = middle\n\n middle=int((start+last)/2)\n condition = int(input(\"Is your guess \" + str(middle) + \"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) \"))\n\nprint(\"It took us {} guesses to get it right! Cheers!\".format(counter))\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = "Sponge_sy"
# Date: 2021/9/11
import numpy
from tqdm import tqdm
from bert4keras.tokenizers import Tokenizer
from bert4keras.models import build_transformer_model
from bert4keras.snippets import sequence_padding, DataGenerator
from utils import *
class data_generator(DataGenerator):
"""Data Generator"""
def __init__(self, pattern="", is_pre=True, *args, **kwargs):
super(data_generator, self).__init__(*args, **kwargs)
self.pattern = pattern
self.is_pre = is_pre
def __iter__(self, random=False):
batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []
for is_end, text in self.sample(random):
if (self.is_pre):
token_ids, segment_ids = tokenizer.encode(first_text=self.pattern, second_text=text, maxlen=maxlen)
else:
token_ids, segment_ids = tokenizer.encode(first_text=text, second_text=self.pattern, maxlen=maxlen)
source_ids, target_ids = token_ids[:], token_ids[:]
batch_token_ids.append(source_ids)
batch_segment_ids.append(segment_ids)
if len(batch_token_ids) == self.batch_size or is_end:
batch_token_ids = sequence_padding(batch_token_ids)
batch_segment_ids = sequence_padding(batch_segment_ids)
yield [batch_token_ids, batch_segment_ids], None
batch_token_ids, batch_segment_ids, = [], []
def predict(data_generator_list, data):
print("\n*******************Start to Zero-Shot predict*******************", flush=True)
patterns_logits = [[] for _ in patterns]
samples_logits = [[] for _ in data]
for i in range(len(data_generator_list)):
print("\nPattern{}".format(i), flush=True)
data_generator = data_generator_list[i]
counter = 0
for (x, _) in tqdm(data_generator):
outputs = model.predict(x[:2])
for out in outputs:
logit_pos = out[0].T
patterns_logits[i].append(logit_pos)
samples_logits[counter].append(logit_pos)
counter += 1
preds = []
for i in range(len(patterns_logits[0])):
pred = numpy.argmax([logits[i] for logits in patterns_logits])
preds.append(int(pred))
return preds, samples_logits
if __name__ == "__main__":
# Load the hyper-parameters-----------------------------------------------------------
maxlen = 128 # The max length 128 is used in our paper
batch_size = 40 # Will not influence the results
# Choose a model----------------------------------------------------------------------
# Recommend to use 'uer-mixed-bert-base'
# model_names = ['google-bert', 'google-bert-small', 'google-bert-zh',
# 'hfl-bert-wwm', 'hfl-bert-wwm-ext',
# 'uer-mixed-bert-tiny', 'uer-mixed-bert-small',
# 'uer-mixed-bert-base', 'uer-mixed-bert-large']
model_name = 'uer-mixed-bert-base'
# Choose a dataset----------------------------------------------------------------------
# dataset_names = ['eprstmt', 'tnews', 'csldcp', 'iflytek']
# dataset_name = 'eprstmt'
# Load model and dataset class
bert_model = Model(model_name=model_name)
# Create a template --------------------------------------------------------------------
label_names = ['entertainment', 'sports', 'music', 'games', 'economics', 'education']
patterns = ["This is {} news".format(label) for label in label_names]
# Prefix or Suffix-------------------------------------------------------------------
is_pre = True
# Load the demo set--------------------------------------------------------------------
demo_data_en = ['FIFA unveils biennial World Cup plan, UEFA threatens boycott',
'COVID vaccines hold up against severe Delta: US data',
'Justin Drew Bieber was born on March 1, 1994 at St. ',
'Horizon launches latest chip to take on global rivals',
'Twitch video gamers rise up to stop ‘hate raids’']
demo_data = demo_data_en
demo_generator_list = []
for p in patterns:
demo_generator_list.append(data_generator(pattern=p, is_pre=is_pre, data=demo_data, batch_size=batch_size))
# Build BERT model---------------------------------------------------------------------
tokenizer = Tokenizer('.' + bert_model.dict_path, do_lower_case=True)
# Load BERET model with NSP head
model = build_transformer_model(
config_path='.' + bert_model.config_path, checkpoint_path='.' + bert_model.checkpoint_path, with_nsp=True,
)
# Zero-Shot predict and evaluate-------------------------------------------------------
preds, samples_logits = predict(demo_generator_list, demo_data)
for i, (p, d) in enumerate(zip(preds, demo_data)):
pred_label = label_names[p]
print("Sample {}:".format(i))
print("Original Text: {}".format(d))
print("Predict label: {}".format(pred_label))
print("Logits: {}".format(samples_logits[i]))
print()
|
normal
|
{
"blob_id": "5cb390b06026bc0899c0b10dc93f3ec1f2ffefa6",
"index": 9727,
"step-1": "<mask token>\n\n\nclass data_generator(DataGenerator):\n <mask token>\n\n def __init__(self, pattern='', is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n\n def __iter__(self, random=False):\n batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []\n for is_end, text in self.sample(random):\n if self.is_pre:\n token_ids, segment_ids = tokenizer.encode(first_text=self.\n pattern, second_text=text, maxlen=maxlen)\n else:\n token_ids, segment_ids = tokenizer.encode(first_text=text,\n second_text=self.pattern, maxlen=maxlen)\n source_ids, target_ids = token_ids[:], token_ids[:]\n batch_token_ids.append(source_ids)\n batch_segment_ids.append(segment_ids)\n if len(batch_token_ids) == self.batch_size or is_end:\n batch_token_ids = sequence_padding(batch_token_ids)\n batch_segment_ids = sequence_padding(batch_segment_ids)\n yield [batch_token_ids, batch_segment_ids], None\n batch_token_ids, batch_segment_ids = [], []\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass data_generator(DataGenerator):\n \"\"\"Data Generator\"\"\"\n\n def __init__(self, pattern='', is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n\n def __iter__(self, random=False):\n batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []\n for is_end, text in self.sample(random):\n if self.is_pre:\n token_ids, segment_ids = tokenizer.encode(first_text=self.\n pattern, second_text=text, maxlen=maxlen)\n else:\n token_ids, segment_ids = tokenizer.encode(first_text=text,\n second_text=self.pattern, maxlen=maxlen)\n source_ids, target_ids = token_ids[:], token_ids[:]\n batch_token_ids.append(source_ids)\n batch_segment_ids.append(segment_ids)\n if len(batch_token_ids) == self.batch_size or is_end:\n batch_token_ids = sequence_padding(batch_token_ids)\n batch_segment_ids = sequence_padding(batch_segment_ids)\n yield [batch_token_ids, batch_segment_ids], None\n batch_token_ids, batch_segment_ids = [], []\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass data_generator(DataGenerator):\n \"\"\"Data Generator\"\"\"\n\n def __init__(self, pattern='', is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n\n def __iter__(self, random=False):\n batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []\n for is_end, text in self.sample(random):\n if self.is_pre:\n token_ids, segment_ids = tokenizer.encode(first_text=self.\n pattern, second_text=text, maxlen=maxlen)\n else:\n token_ids, segment_ids = tokenizer.encode(first_text=text,\n second_text=self.pattern, maxlen=maxlen)\n source_ids, target_ids = token_ids[:], token_ids[:]\n batch_token_ids.append(source_ids)\n batch_segment_ids.append(segment_ids)\n if len(batch_token_ids) == self.batch_size or is_end:\n batch_token_ids = sequence_padding(batch_token_ids)\n batch_segment_ids = sequence_padding(batch_segment_ids)\n yield [batch_token_ids, batch_segment_ids], None\n batch_token_ids, batch_segment_ids = [], []\n\n\ndef predict(data_generator_list, data):\n print('\\n*******************Start to Zero-Shot predict*******************',\n flush=True)\n patterns_logits = [[] for _ in patterns]\n samples_logits = [[] for _ in data]\n for i in range(len(data_generator_list)):\n print('\\nPattern{}'.format(i), flush=True)\n data_generator = data_generator_list[i]\n counter = 0\n for x, _ in tqdm(data_generator):\n outputs = model.predict(x[:2])\n for out in outputs:\n logit_pos = out[0].T\n patterns_logits[i].append(logit_pos)\n samples_logits[counter].append(logit_pos)\n counter += 1\n preds = []\n for i in range(len(patterns_logits[0])):\n pred = numpy.argmax([logits[i] for logits in patterns_logits])\n preds.append(int(pred))\n return preds, samples_logits\n\n\nif __name__ == '__main__':\n maxlen = 128\n batch_size = 40\n model_name = 'uer-mixed-bert-base'\n bert_model = Model(model_name=model_name)\n label_names = ['entertainment', 'sports', 'music', 'games', 'economics',\n 'education']\n patterns = ['This is {} news'.format(label) for label in label_names]\n is_pre = True\n demo_data_en = [\n 'FIFA unveils biennial World Cup plan, UEFA threatens boycott',\n 'COVID vaccines hold up against severe Delta: US data',\n 'Justin Drew Bieber was born on March 1, 1994 at St. ',\n 'Horizon launches latest chip to take on global rivals',\n 'Twitch video gamers rise up to stop ‘hate raids’']\n demo_data = demo_data_en\n demo_generator_list = []\n for p in patterns:\n demo_generator_list.append(data_generator(pattern=p, is_pre=is_pre,\n data=demo_data, batch_size=batch_size))\n tokenizer = Tokenizer('.' + bert_model.dict_path, do_lower_case=True)\n model = build_transformer_model(config_path='.' + bert_model.\n config_path, checkpoint_path='.' + bert_model.checkpoint_path,\n with_nsp=True)\n preds, samples_logits = predict(demo_generator_list, demo_data)\n for i, (p, d) in enumerate(zip(preds, demo_data)):\n pred_label = label_names[p]\n print('Sample {}:'.format(i))\n print('Original Text: {}'.format(d))\n print('Predict label: {}'.format(pred_label))\n print('Logits: {}'.format(samples_logits[i]))\n print()\n",
"step-4": "import numpy\nfrom tqdm import tqdm\nfrom bert4keras.tokenizers import Tokenizer\nfrom bert4keras.models import build_transformer_model\nfrom bert4keras.snippets import sequence_padding, DataGenerator\nfrom utils import *\n\n\nclass data_generator(DataGenerator):\n \"\"\"Data Generator\"\"\"\n\n def __init__(self, pattern='', is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n\n def __iter__(self, random=False):\n batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []\n for is_end, text in self.sample(random):\n if self.is_pre:\n token_ids, segment_ids = tokenizer.encode(first_text=self.\n pattern, second_text=text, maxlen=maxlen)\n else:\n token_ids, segment_ids = tokenizer.encode(first_text=text,\n second_text=self.pattern, maxlen=maxlen)\n source_ids, target_ids = token_ids[:], token_ids[:]\n batch_token_ids.append(source_ids)\n batch_segment_ids.append(segment_ids)\n if len(batch_token_ids) == self.batch_size or is_end:\n batch_token_ids = sequence_padding(batch_token_ids)\n batch_segment_ids = sequence_padding(batch_segment_ids)\n yield [batch_token_ids, batch_segment_ids], None\n batch_token_ids, batch_segment_ids = [], []\n\n\ndef predict(data_generator_list, data):\n print('\\n*******************Start to Zero-Shot predict*******************',\n flush=True)\n patterns_logits = [[] for _ in patterns]\n samples_logits = [[] for _ in data]\n for i in range(len(data_generator_list)):\n print('\\nPattern{}'.format(i), flush=True)\n data_generator = data_generator_list[i]\n counter = 0\n for x, _ in tqdm(data_generator):\n outputs = model.predict(x[:2])\n for out in outputs:\n logit_pos = out[0].T\n patterns_logits[i].append(logit_pos)\n samples_logits[counter].append(logit_pos)\n counter += 1\n preds = []\n for i in range(len(patterns_logits[0])):\n pred = numpy.argmax([logits[i] for logits in patterns_logits])\n preds.append(int(pred))\n return preds, samples_logits\n\n\nif __name__ == '__main__':\n maxlen = 128\n batch_size = 40\n model_name = 'uer-mixed-bert-base'\n bert_model = Model(model_name=model_name)\n label_names = ['entertainment', 'sports', 'music', 'games', 'economics',\n 'education']\n patterns = ['This is {} news'.format(label) for label in label_names]\n is_pre = True\n demo_data_en = [\n 'FIFA unveils biennial World Cup plan, UEFA threatens boycott',\n 'COVID vaccines hold up against severe Delta: US data',\n 'Justin Drew Bieber was born on March 1, 1994 at St. ',\n 'Horizon launches latest chip to take on global rivals',\n 'Twitch video gamers rise up to stop ‘hate raids’']\n demo_data = demo_data_en\n demo_generator_list = []\n for p in patterns:\n demo_generator_list.append(data_generator(pattern=p, is_pre=is_pre,\n data=demo_data, batch_size=batch_size))\n tokenizer = Tokenizer('.' + bert_model.dict_path, do_lower_case=True)\n model = build_transformer_model(config_path='.' + bert_model.\n config_path, checkpoint_path='.' + bert_model.checkpoint_path,\n with_nsp=True)\n preds, samples_logits = predict(demo_generator_list, demo_data)\n for i, (p, d) in enumerate(zip(preds, demo_data)):\n pred_label = label_names[p]\n print('Sample {}:'.format(i))\n print('Original Text: {}'.format(d))\n print('Predict label: {}'.format(pred_label))\n print('Logits: {}'.format(samples_logits[i]))\n print()\n",
"step-5": "#! /usr/bin/env python\n# -*- coding: utf-8 -*-\n# __author__ = \"Sponge_sy\"\n# Date: 2021/9/11\n\n\nimport numpy\nfrom tqdm import tqdm\nfrom bert4keras.tokenizers import Tokenizer\nfrom bert4keras.models import build_transformer_model\nfrom bert4keras.snippets import sequence_padding, DataGenerator\nfrom utils import *\n\n\nclass data_generator(DataGenerator):\n \"\"\"Data Generator\"\"\"\n\n def __init__(self, pattern=\"\", is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n\n def __iter__(self, random=False):\n batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []\n for is_end, text in self.sample(random):\n if (self.is_pre):\n token_ids, segment_ids = tokenizer.encode(first_text=self.pattern, second_text=text, maxlen=maxlen)\n else:\n token_ids, segment_ids = tokenizer.encode(first_text=text, second_text=self.pattern, maxlen=maxlen)\n source_ids, target_ids = token_ids[:], token_ids[:]\n batch_token_ids.append(source_ids)\n batch_segment_ids.append(segment_ids)\n\n if len(batch_token_ids) == self.batch_size or is_end:\n batch_token_ids = sequence_padding(batch_token_ids)\n batch_segment_ids = sequence_padding(batch_segment_ids)\n yield [batch_token_ids, batch_segment_ids], None\n batch_token_ids, batch_segment_ids, = [], []\n\ndef predict(data_generator_list, data):\n print(\"\\n*******************Start to Zero-Shot predict*******************\", flush=True)\n patterns_logits = [[] for _ in patterns]\n samples_logits = [[] for _ in data]\n for i in range(len(data_generator_list)):\n print(\"\\nPattern{}\".format(i), flush=True)\n data_generator = data_generator_list[i]\n counter = 0\n for (x, _) in tqdm(data_generator):\n outputs = model.predict(x[:2])\n for out in outputs:\n logit_pos = out[0].T\n patterns_logits[i].append(logit_pos)\n samples_logits[counter].append(logit_pos)\n counter += 1\n preds = []\n for i in range(len(patterns_logits[0])):\n pred = numpy.argmax([logits[i] for logits in patterns_logits])\n preds.append(int(pred))\n return preds, samples_logits\n\nif __name__ == \"__main__\":\n\n # Load the hyper-parameters-----------------------------------------------------------\n maxlen = 128 # The max length 128 is used in our paper\n batch_size = 40 # Will not influence the results\n\n # Choose a model----------------------------------------------------------------------\n # Recommend to use 'uer-mixed-bert-base'\n # model_names = ['google-bert', 'google-bert-small', 'google-bert-zh',\n # 'hfl-bert-wwm', 'hfl-bert-wwm-ext',\n # 'uer-mixed-bert-tiny', 'uer-mixed-bert-small',\n # 'uer-mixed-bert-base', 'uer-mixed-bert-large']\n model_name = 'uer-mixed-bert-base'\n\n # Choose a dataset----------------------------------------------------------------------\n # dataset_names = ['eprstmt', 'tnews', 'csldcp', 'iflytek']\n # dataset_name = 'eprstmt'\n\n # Load model and dataset class\n bert_model = Model(model_name=model_name)\n\n # Create a template --------------------------------------------------------------------\n label_names = ['entertainment', 'sports', 'music', 'games', 'economics', 'education']\n patterns = [\"This is {} news\".format(label) for label in label_names]\n\n # Prefix or Suffix-------------------------------------------------------------------\n is_pre = True\n\n # Load the demo set--------------------------------------------------------------------\n\n demo_data_en = ['FIFA unveils biennial World Cup plan, UEFA threatens boycott',\n 'COVID vaccines hold 
up against severe Delta: US data',\n 'Justin Drew Bieber was born on March 1, 1994 at St. ',\n 'Horizon launches latest chip to take on global rivals',\n 'Twitch video gamers rise up to stop ‘hate raids’']\n\n demo_data = demo_data_en\n demo_generator_list = []\n for p in patterns:\n demo_generator_list.append(data_generator(pattern=p, is_pre=is_pre, data=demo_data, batch_size=batch_size))\n\n # Build BERT model---------------------------------------------------------------------\n tokenizer = Tokenizer('.' + bert_model.dict_path, do_lower_case=True)\n # Load BERET model with NSP head\n model = build_transformer_model(\n config_path='.' + bert_model.config_path, checkpoint_path='.' + bert_model.checkpoint_path, with_nsp=True,\n )\n\n # Zero-Shot predict and evaluate-------------------------------------------------------\n preds, samples_logits = predict(demo_generator_list, demo_data)\n for i, (p, d) in enumerate(zip(preds, demo_data)):\n pred_label = label_names[p]\n print(\"Sample {}:\".format(i))\n print(\"Original Text: {}\".format(d))\n print(\"Predict label: {}\".format(pred_label))\n print(\"Logits: {}\".format(samples_logits[i]))\n print()\n",
"step-ids": [
3,
4,
6,
7,
8
]
}
|
[
3,
4,
6,
7,
8
] |
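The record above scores each candidate label by pairing its pattern sentence ("This is {} news") with the input text and reading BERT's next-sentence-prediction logit; the argmax over patterns is the predicted label. A minimal sketch of that scoring loop, decoupled from bert4keras: here `nsp_positive_logit` is a hypothetical stand-in for a model call returning the positive NSP logit for one (pattern, text) pair, not a function from the record.

# Zero-shot classification by NSP pattern scoring (sketch, assumes a
# user-supplied `nsp_positive_logit(pattern, text)` callable).
def classify(text, patterns, label_names, nsp_positive_logit):
    scores = [nsp_positive_logit(p, text) for p in patterns]
    best = max(range(len(scores)), key=scores.__getitem__)  # argmax without numpy
    return label_names[best], scores

label_names = ['sports', 'music']
patterns = ['This is {} news'.format(label) for label in label_names]
# label, scores = classify('FIFA unveils biennial World Cup plan', patterns, label_names, nsp_positive_logit)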
<|reserved_special_token_0|>
class MainViewNode(NodeBase):
<|reserved_special_token_0|>
def Label(self):
return 'Main View'
<|reserved_special_token_0|>
class DockSectionNode(NodeBase):
def __init__(self, label, icon, contents, settings):
NodeBase.__init__(self, icon)
self.label = label
self.contents = contents
self.settings = settings
def Label(self):
return self.label
def OnSelection(self, treePanel):
workspace = treePanel.parent.workspacePanel
workspace.nodeData = self
workspace.SetupDockSectionControls()
class ToolbarNode(NodeBase):
def __init__(self, toolbar):
NodeBase.__init__(self, 'InstrumentAppBkg+SettingsOverlay')
self.toolbar = toolbar
def Label(self):
return 'Toolbar'
def GetButtons(self):
return self.toolbar.buttons
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.toolbar.quickOpen
workspacePanel.SetupToolbarControls()
class ButtonNode(NodeBase):
def __init__(self, button, isDockPartNode):
NodeBase.__init__(self, 'TradeEntryApp')
self.button = button
self.isDockPartNode = isDockPartNode
def Label(self):
label = self.button.HasField('label') and self.button.label.encode(
'utf-8')
return label or '<Buttons>'
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.button
workspacePanel.SetupButtonControls(self.isDockPartNode)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DashboardTabNode(NodeBase):
<|reserved_special_token_0|>
def Label(self):
label = self.tabContent.caption.encode('utf-8')
return label or '<Dashboard>'
def Contents(self):
contents = AppWorkspace.DashboardContent()
contents.ParseFromString(self.tabContent.contents)
return contents
def Settings(self):
userSettings = AppWorkspace.DashboardSettings()
userSettings.ParseFromString(self.tabContent.userSettings)
return userSettings
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self
workspacePanel.SetupDashboardTabControls()
class WorkbenchTabNode(NodeBase):
def __init__(self, tabContent):
NodeBase.__init__(self, 'Layout')
self.tabContent = tabContent
self.contents = self.Contents()
self.userSettings = self.Settings()
def Label(self):
label = self.tabContent.caption.encode('utf-8')
return label or '<Workbench>'
def Contents(self):
contents = AppWorkspace.WorkbenchContent()
contents.ParseFromString(self.tabContent.contents)
return contents
def Settings(self):
userSettings = AppWorkspace.WorkbenchSettings()
userSettings.ParseFromString(self.tabContent.userSettings)
return userSettings
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.tabContent
workspacePanel.SetupWorkbenchTabControls()
class DashboardPartNode(NodeBase):
def __init__(self, part, settings, label=None):
NodeBase.__init__(self, 'FExtension')
self.part = part
self.settings = settings
def Label(self):
v = self.part.view
label = v.caption if v.HasField('caption'
) and v.caption else v.viewName
return label.encode('utf-8') or '<Part>'
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self
workspacePanel.SetupDashboardPartControls()
class DockPartNode(NodeBase):
def __init__(self, part):
NodeBase.__init__(self, 'FExtension')
self.part = part
def Label(self):
v = self.part.view
label = v.caption if v.HasField('caption'
) and v.caption else v.viewName
return label.encode('utf-8') or '<Part>'
def GetButtons(self):
return self.part.selectionActionButtons
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.part
workspacePanel.SetupDockSectionPartControls()
class MainViewNode(NodeBase):
def __init__(self, view):
NodeBase.__init__(self, 'DisplayTabs')
self.view = view
def Label(self):
return 'Main View'
def OnSelection(self, treePanel):
workspace = treePanel.parent.workspacePanel
workspace.nodeData = self.view
workspace.SetupMainViewControls()
class DockSectionNode(NodeBase):
def __init__(self, label, icon, contents, settings):
NodeBase.__init__(self, icon)
self.label = label
self.contents = contents
self.settings = settings
def Label(self):
return self.label
def OnSelection(self, treePanel):
workspace = treePanel.parent.workspacePanel
workspace.nodeData = self
workspace.SetupDockSectionControls()
class ToolbarNode(NodeBase):
def __init__(self, toolbar):
NodeBase.__init__(self, 'InstrumentAppBkg+SettingsOverlay')
self.toolbar = toolbar
def Label(self):
return 'Toolbar'
def GetButtons(self):
return self.toolbar.buttons
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.toolbar.quickOpen
workspacePanel.SetupToolbarControls()
class ButtonNode(NodeBase):
def __init__(self, button, isDockPartNode):
NodeBase.__init__(self, 'TradeEntryApp')
self.button = button
self.isDockPartNode = isDockPartNode
def Label(self):
label = self.button.HasField('label') and self.button.label.encode(
'utf-8')
return label or '<Buttons>'
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.button
workspacePanel.SetupButtonControls(self.isDockPartNode)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DashboardTabNode(NodeBase):
def __init__(self, tabContent):
NodeBase.__init__(self, 'WindowSwitch')
self.tabContent = tabContent
self.contents = self.Contents()
self.userSettings = self.Settings()
def Label(self):
label = self.tabContent.caption.encode('utf-8')
return label or '<Dashboard>'
def Contents(self):
contents = AppWorkspace.DashboardContent()
contents.ParseFromString(self.tabContent.contents)
return contents
def Settings(self):
userSettings = AppWorkspace.DashboardSettings()
userSettings.ParseFromString(self.tabContent.userSettings)
return userSettings
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self
workspacePanel.SetupDashboardTabControls()
class WorkbenchTabNode(NodeBase):
def __init__(self, tabContent):
NodeBase.__init__(self, 'Layout')
self.tabContent = tabContent
self.contents = self.Contents()
self.userSettings = self.Settings()
def Label(self):
label = self.tabContent.caption.encode('utf-8')
return label or '<Workbench>'
def Contents(self):
contents = AppWorkspace.WorkbenchContent()
contents.ParseFromString(self.tabContent.contents)
return contents
def Settings(self):
userSettings = AppWorkspace.WorkbenchSettings()
userSettings.ParseFromString(self.tabContent.userSettings)
return userSettings
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.tabContent
workspacePanel.SetupWorkbenchTabControls()
class DashboardPartNode(NodeBase):
def __init__(self, part, settings, label=None):
NodeBase.__init__(self, 'FExtension')
self.part = part
self.settings = settings
def Label(self):
v = self.part.view
label = v.caption if v.HasField('caption'
) and v.caption else v.viewName
return label.encode('utf-8') or '<Part>'
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self
workspacePanel.SetupDashboardPartControls()
class DockPartNode(NodeBase):
def __init__(self, part):
NodeBase.__init__(self, 'FExtension')
self.part = part
def Label(self):
v = self.part.view
label = v.caption if v.HasField('caption'
) and v.caption else v.viewName
return label.encode('utf-8') or '<Part>'
def GetButtons(self):
return self.part.selectionActionButtons
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.part
workspacePanel.SetupDockSectionPartControls()
class MainViewNode(NodeBase):
def __init__(self, view):
NodeBase.__init__(self, 'DisplayTabs')
self.view = view
def Label(self):
return 'Main View'
def OnSelection(self, treePanel):
workspace = treePanel.parent.workspacePanel
workspace.nodeData = self.view
workspace.SetupMainViewControls()
class DockSectionNode(NodeBase):
def __init__(self, label, icon, contents, settings):
NodeBase.__init__(self, icon)
self.label = label
self.contents = contents
self.settings = settings
def Label(self):
return self.label
def OnSelection(self, treePanel):
workspace = treePanel.parent.workspacePanel
workspace.nodeData = self
workspace.SetupDockSectionControls()
class ToolbarNode(NodeBase):
def __init__(self, toolbar):
NodeBase.__init__(self, 'InstrumentAppBkg+SettingsOverlay')
self.toolbar = toolbar
def Label(self):
return 'Toolbar'
def GetButtons(self):
return self.toolbar.buttons
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.toolbar.quickOpen
workspacePanel.SetupToolbarControls()
class ButtonNode(NodeBase):
def __init__(self, button, isDockPartNode):
NodeBase.__init__(self, 'TradeEntryApp')
self.button = button
self.isDockPartNode = isDockPartNode
def Label(self):
label = self.button.HasField('label') and self.button.label.encode(
'utf-8')
return label or '<Buttons>'
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.button
workspacePanel.SetupButtonControls(self.isDockPartNode)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class NodeBase:
<|reserved_special_token_0|>
def Label(self):
raise NotImplementedError('Label')
<|reserved_special_token_0|>
class WorkspaceNode(NodeBase):
def __init__(self, workspace, label):
NodeBase.__init__(self, 'FWorkspace')
self.contents = workspace
self.label = label
def Label(self):
return self.label
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self
workspacePanel.SetupWorkspaceControls()
class DashboardTabNode(NodeBase):
def __init__(self, tabContent):
NodeBase.__init__(self, 'WindowSwitch')
self.tabContent = tabContent
self.contents = self.Contents()
self.userSettings = self.Settings()
def Label(self):
label = self.tabContent.caption.encode('utf-8')
return label or '<Dashboard>'
def Contents(self):
contents = AppWorkspace.DashboardContent()
contents.ParseFromString(self.tabContent.contents)
return contents
def Settings(self):
userSettings = AppWorkspace.DashboardSettings()
userSettings.ParseFromString(self.tabContent.userSettings)
return userSettings
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self
workspacePanel.SetupDashboardTabControls()
class WorkbenchTabNode(NodeBase):
def __init__(self, tabContent):
NodeBase.__init__(self, 'Layout')
self.tabContent = tabContent
self.contents = self.Contents()
self.userSettings = self.Settings()
def Label(self):
label = self.tabContent.caption.encode('utf-8')
return label or '<Workbench>'
def Contents(self):
contents = AppWorkspace.WorkbenchContent()
contents.ParseFromString(self.tabContent.contents)
return contents
def Settings(self):
userSettings = AppWorkspace.WorkbenchSettings()
userSettings.ParseFromString(self.tabContent.userSettings)
return userSettings
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.tabContent
workspacePanel.SetupWorkbenchTabControls()
class DashboardPartNode(NodeBase):
def __init__(self, part, settings, label=None):
NodeBase.__init__(self, 'FExtension')
self.part = part
self.settings = settings
def Label(self):
v = self.part.view
label = v.caption if v.HasField('caption'
) and v.caption else v.viewName
return label.encode('utf-8') or '<Part>'
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self
workspacePanel.SetupDashboardPartControls()
class DockPartNode(NodeBase):
def __init__(self, part):
NodeBase.__init__(self, 'FExtension')
self.part = part
def Label(self):
v = self.part.view
label = v.caption if v.HasField('caption'
) and v.caption else v.viewName
return label.encode('utf-8') or '<Part>'
def GetButtons(self):
return self.part.selectionActionButtons
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.part
workspacePanel.SetupDockSectionPartControls()
class MainViewNode(NodeBase):
def __init__(self, view):
NodeBase.__init__(self, 'DisplayTabs')
self.view = view
def Label(self):
return 'Main View'
def OnSelection(self, treePanel):
workspace = treePanel.parent.workspacePanel
workspace.nodeData = self.view
workspace.SetupMainViewControls()
class DockSectionNode(NodeBase):
def __init__(self, label, icon, contents, settings):
NodeBase.__init__(self, icon)
self.label = label
self.contents = contents
self.settings = settings
def Label(self):
return self.label
def OnSelection(self, treePanel):
workspace = treePanel.parent.workspacePanel
workspace.nodeData = self
workspace.SetupDockSectionControls()
class ToolbarNode(NodeBase):
def __init__(self, toolbar):
NodeBase.__init__(self, 'InstrumentAppBkg+SettingsOverlay')
self.toolbar = toolbar
def Label(self):
return 'Toolbar'
def GetButtons(self):
return self.toolbar.buttons
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.toolbar.quickOpen
workspacePanel.SetupToolbarControls()
class ButtonNode(NodeBase):
def __init__(self, button, isDockPartNode):
NodeBase.__init__(self, 'TradeEntryApp')
self.button = button
self.isDockPartNode = isDockPartNode
def Label(self):
label = self.button.HasField('label') and self.button.label.encode(
'utf-8')
return label or '<Buttons>'
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.button
workspacePanel.SetupButtonControls(self.isDockPartNode)
<|reserved_special_token_1|>
""" Compiled: 2020-09-18 10:38:52 """
#__src_file__ = "extensions/AppWorkspaceTools/etc/FAppWorkspaceDesignerNodes.py"
""" Compiled: 2018-06-07 17:06:19 """
#__src_file__ = "extensions/AppWorkspaceTools/etc/FAppWorkspaceDesignerNodes.py"
import acm
import FUxCore
import Contracts_AppConfig_Messages_AppWorkspace as AppWorkspace
class NodeBase():
def __init__(self, icon=''):
self.icon = icon
def Label(self):
raise NotImplementedError('Label')
def Icon(self):
return self.icon
class WorkspaceNode(NodeBase):
def __init__(self, workspace, label):
NodeBase.__init__(self, 'FWorkspace')
self.contents = workspace
self.label = label
def Label(self):
return self.label
def OnSelection(self, treePanel):
# TODO Don't navigate to siblings, go through parent
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self
workspacePanel.SetupWorkspaceControls()
class DashboardTabNode(NodeBase):
def __init__(self, tabContent):
NodeBase.__init__(self, 'WindowSwitch')
self.tabContent = tabContent
self.contents = self.Contents()
self.userSettings = self.Settings()
def Label(self):
label = self.tabContent.caption.encode('utf-8')
return label or '<Dashboard>'
def Contents(self):
contents = AppWorkspace.DashboardContent()
contents.ParseFromString(self.tabContent.contents)
return contents
def Settings(self):
userSettings = AppWorkspace.DashboardSettings()
userSettings.ParseFromString(self.tabContent.userSettings)
return userSettings
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self
workspacePanel.SetupDashboardTabControls()
class WorkbenchTabNode(NodeBase):
def __init__(self, tabContent):
NodeBase.__init__(self, 'Layout')
self.tabContent = tabContent
self.contents = self.Contents()
self.userSettings = self.Settings()
def Label(self):
label = self.tabContent.caption.encode('utf-8')
return label or '<Workbench>'
def Contents(self):
contents = AppWorkspace.WorkbenchContent()
contents.ParseFromString(self.tabContent.contents)
return contents
def Settings(self):
userSettings = AppWorkspace.WorkbenchSettings()
userSettings.ParseFromString(self.tabContent.userSettings)
return userSettings
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.tabContent
workspacePanel.SetupWorkbenchTabControls()
class DashboardPartNode(NodeBase):
def __init__(self, part, settings, label=None):
NodeBase.__init__(self, 'FExtension')
self.part = part
self.settings = settings
def Label(self):
v = self.part.view
label = v.caption if v.HasField('caption') and v.caption else v.viewName
return label.encode('utf-8') or '<Part>'
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self
workspacePanel.SetupDashboardPartControls()
class DockPartNode(NodeBase):
def __init__(self, part):
NodeBase.__init__(self, 'FExtension')
self.part = part
def Label(self):
v = self.part.view
label = v.caption if v.HasField('caption') and v.caption else v.viewName
return label.encode('utf-8') or '<Part>'
def GetButtons(self):
return self.part.selectionActionButtons
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.part
workspacePanel.SetupDockSectionPartControls()
class MainViewNode(NodeBase):
def __init__(self, view):
NodeBase.__init__(self, 'DisplayTabs')
self.view = view
def Label(self):
return 'Main View'
def OnSelection(self, treePanel):
workspace = treePanel.parent.workspacePanel
workspace.nodeData = self.view
workspace.SetupMainViewControls()
class DockSectionNode(NodeBase):
def __init__(self, label, icon, contents, settings):
NodeBase.__init__(self, icon)
self.label = label
self.contents = contents
self.settings = settings
def Label(self):
return self.label
def OnSelection(self, treePanel):
workspace = treePanel.parent.workspacePanel
workspace.nodeData = self
workspace.SetupDockSectionControls()
class ToolbarNode(NodeBase):
def __init__(self, toolbar):
NodeBase.__init__(self, 'InstrumentAppBkg+SettingsOverlay')
self.toolbar = toolbar
def Label(self):
return 'Toolbar'
def GetButtons(self):
return self.toolbar.buttons
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.toolbar.quickOpen
workspacePanel.SetupToolbarControls()
class ButtonNode(NodeBase):
def __init__(self, button, isDockPartNode):
NodeBase.__init__(self, 'TradeEntryApp')
self.button = button
self.isDockPartNode = isDockPartNode
def Label(self):
label = self.button.HasField('label') and \
self.button.label.encode('utf-8')
return label or '<Buttons>'
def OnSelection(self, treePanel):
workspacePanel = treePanel.parent.workspacePanel
workspacePanel.nodeData = self.button
workspacePanel.SetupButtonControls(self.isDockPartNode)
|
flexible
|
{
"blob_id": "f80de2b069cf1dee2e665556262c6e84ce04b208",
"index": 1244,
"step-1": "<mask token>\n\n\nclass MainViewNode(NodeBase):\n <mask token>\n\n def Label(self):\n return 'Main View'\n <mask token>\n\n\nclass DockSectionNode(NodeBase):\n\n def __init__(self, label, icon, contents, settings):\n NodeBase.__init__(self, icon)\n self.label = label\n self.contents = contents\n self.settings = settings\n\n def Label(self):\n return self.label\n\n def OnSelection(self, treePanel):\n workspace = treePanel.parent.workspacePanel\n workspace.nodeData = self\n workspace.SetupDockSectionControls()\n\n\nclass ToolbarNode(NodeBase):\n\n def __init__(self, toolbar):\n NodeBase.__init__(self, 'InstrumentAppBkg+SettingsOverlay')\n self.toolbar = toolbar\n\n def Label(self):\n return 'Toolbar'\n\n def GetButtons(self):\n return self.toolbar.buttons\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.toolbar.quickOpen\n workspacePanel.SetupToolbarControls()\n\n\nclass ButtonNode(NodeBase):\n\n def __init__(self, button, isDockPartNode):\n NodeBase.__init__(self, 'TradeEntryApp')\n self.button = button\n self.isDockPartNode = isDockPartNode\n\n def Label(self):\n label = self.button.HasField('label') and self.button.label.encode(\n 'utf-8')\n return label or '<Buttons>'\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.button\n workspacePanel.SetupButtonControls(self.isDockPartNode)\n",
"step-2": "<mask token>\n\n\nclass DashboardTabNode(NodeBase):\n <mask token>\n\n def Label(self):\n label = self.tabContent.caption.encode('utf-8')\n return label or '<Dashboard>'\n\n def Contents(self):\n contents = AppWorkspace.DashboardContent()\n contents.ParseFromString(self.tabContent.contents)\n return contents\n\n def Settings(self):\n userSettings = AppWorkspace.DashboardSettings()\n userSettings.ParseFromString(self.tabContent.userSettings)\n return userSettings\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self\n workspacePanel.SetupDashboardTabControls()\n\n\nclass WorkbenchTabNode(NodeBase):\n\n def __init__(self, tabContent):\n NodeBase.__init__(self, 'Layout')\n self.tabContent = tabContent\n self.contents = self.Contents()\n self.userSettings = self.Settings()\n\n def Label(self):\n label = self.tabContent.caption.encode('utf-8')\n return label or '<Workbench>'\n\n def Contents(self):\n contents = AppWorkspace.WorkbenchContent()\n contents.ParseFromString(self.tabContent.contents)\n return contents\n\n def Settings(self):\n userSettings = AppWorkspace.WorkbenchSettings()\n userSettings.ParseFromString(self.tabContent.userSettings)\n return userSettings\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.tabContent\n workspacePanel.SetupWorkbenchTabControls()\n\n\nclass DashboardPartNode(NodeBase):\n\n def __init__(self, part, settings, label=None):\n NodeBase.__init__(self, 'FExtension')\n self.part = part\n self.settings = settings\n\n def Label(self):\n v = self.part.view\n label = v.caption if v.HasField('caption'\n ) and v.caption else v.viewName\n return label.encode('utf-8') or '<Part>'\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self\n workspacePanel.SetupDashboardPartControls()\n\n\nclass DockPartNode(NodeBase):\n\n def __init__(self, part):\n NodeBase.__init__(self, 'FExtension')\n self.part = part\n\n def Label(self):\n v = self.part.view\n label = v.caption if v.HasField('caption'\n ) and v.caption else v.viewName\n return label.encode('utf-8') or '<Part>'\n\n def GetButtons(self):\n return self.part.selectionActionButtons\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.part\n workspacePanel.SetupDockSectionPartControls()\n\n\nclass MainViewNode(NodeBase):\n\n def __init__(self, view):\n NodeBase.__init__(self, 'DisplayTabs')\n self.view = view\n\n def Label(self):\n return 'Main View'\n\n def OnSelection(self, treePanel):\n workspace = treePanel.parent.workspacePanel\n workspace.nodeData = self.view\n workspace.SetupMainViewControls()\n\n\nclass DockSectionNode(NodeBase):\n\n def __init__(self, label, icon, contents, settings):\n NodeBase.__init__(self, icon)\n self.label = label\n self.contents = contents\n self.settings = settings\n\n def Label(self):\n return self.label\n\n def OnSelection(self, treePanel):\n workspace = treePanel.parent.workspacePanel\n workspace.nodeData = self\n workspace.SetupDockSectionControls()\n\n\nclass ToolbarNode(NodeBase):\n\n def __init__(self, toolbar):\n NodeBase.__init__(self, 'InstrumentAppBkg+SettingsOverlay')\n self.toolbar = toolbar\n\n def Label(self):\n return 'Toolbar'\n\n def GetButtons(self):\n return self.toolbar.buttons\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData 
= self.toolbar.quickOpen\n workspacePanel.SetupToolbarControls()\n\n\nclass ButtonNode(NodeBase):\n\n def __init__(self, button, isDockPartNode):\n NodeBase.__init__(self, 'TradeEntryApp')\n self.button = button\n self.isDockPartNode = isDockPartNode\n\n def Label(self):\n label = self.button.HasField('label') and self.button.label.encode(\n 'utf-8')\n return label or '<Buttons>'\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.button\n workspacePanel.SetupButtonControls(self.isDockPartNode)\n",
"step-3": "<mask token>\n\n\nclass DashboardTabNode(NodeBase):\n\n def __init__(self, tabContent):\n NodeBase.__init__(self, 'WindowSwitch')\n self.tabContent = tabContent\n self.contents = self.Contents()\n self.userSettings = self.Settings()\n\n def Label(self):\n label = self.tabContent.caption.encode('utf-8')\n return label or '<Dashboard>'\n\n def Contents(self):\n contents = AppWorkspace.DashboardContent()\n contents.ParseFromString(self.tabContent.contents)\n return contents\n\n def Settings(self):\n userSettings = AppWorkspace.DashboardSettings()\n userSettings.ParseFromString(self.tabContent.userSettings)\n return userSettings\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self\n workspacePanel.SetupDashboardTabControls()\n\n\nclass WorkbenchTabNode(NodeBase):\n\n def __init__(self, tabContent):\n NodeBase.__init__(self, 'Layout')\n self.tabContent = tabContent\n self.contents = self.Contents()\n self.userSettings = self.Settings()\n\n def Label(self):\n label = self.tabContent.caption.encode('utf-8')\n return label or '<Workbench>'\n\n def Contents(self):\n contents = AppWorkspace.WorkbenchContent()\n contents.ParseFromString(self.tabContent.contents)\n return contents\n\n def Settings(self):\n userSettings = AppWorkspace.WorkbenchSettings()\n userSettings.ParseFromString(self.tabContent.userSettings)\n return userSettings\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.tabContent\n workspacePanel.SetupWorkbenchTabControls()\n\n\nclass DashboardPartNode(NodeBase):\n\n def __init__(self, part, settings, label=None):\n NodeBase.__init__(self, 'FExtension')\n self.part = part\n self.settings = settings\n\n def Label(self):\n v = self.part.view\n label = v.caption if v.HasField('caption'\n ) and v.caption else v.viewName\n return label.encode('utf-8') or '<Part>'\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self\n workspacePanel.SetupDashboardPartControls()\n\n\nclass DockPartNode(NodeBase):\n\n def __init__(self, part):\n NodeBase.__init__(self, 'FExtension')\n self.part = part\n\n def Label(self):\n v = self.part.view\n label = v.caption if v.HasField('caption'\n ) and v.caption else v.viewName\n return label.encode('utf-8') or '<Part>'\n\n def GetButtons(self):\n return self.part.selectionActionButtons\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.part\n workspacePanel.SetupDockSectionPartControls()\n\n\nclass MainViewNode(NodeBase):\n\n def __init__(self, view):\n NodeBase.__init__(self, 'DisplayTabs')\n self.view = view\n\n def Label(self):\n return 'Main View'\n\n def OnSelection(self, treePanel):\n workspace = treePanel.parent.workspacePanel\n workspace.nodeData = self.view\n workspace.SetupMainViewControls()\n\n\nclass DockSectionNode(NodeBase):\n\n def __init__(self, label, icon, contents, settings):\n NodeBase.__init__(self, icon)\n self.label = label\n self.contents = contents\n self.settings = settings\n\n def Label(self):\n return self.label\n\n def OnSelection(self, treePanel):\n workspace = treePanel.parent.workspacePanel\n workspace.nodeData = self\n workspace.SetupDockSectionControls()\n\n\nclass ToolbarNode(NodeBase):\n\n def __init__(self, toolbar):\n NodeBase.__init__(self, 'InstrumentAppBkg+SettingsOverlay')\n self.toolbar = toolbar\n\n def Label(self):\n return 'Toolbar'\n\n 
def GetButtons(self):\n return self.toolbar.buttons\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.toolbar.quickOpen\n workspacePanel.SetupToolbarControls()\n\n\nclass ButtonNode(NodeBase):\n\n def __init__(self, button, isDockPartNode):\n NodeBase.__init__(self, 'TradeEntryApp')\n self.button = button\n self.isDockPartNode = isDockPartNode\n\n def Label(self):\n label = self.button.HasField('label') and self.button.label.encode(\n 'utf-8')\n return label or '<Buttons>'\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.button\n workspacePanel.SetupButtonControls(self.isDockPartNode)\n",
"step-4": "<mask token>\n\n\nclass NodeBase:\n <mask token>\n\n def Label(self):\n raise NotImplementedError('Label')\n <mask token>\n\n\nclass WorkspaceNode(NodeBase):\n\n def __init__(self, workspace, label):\n NodeBase.__init__(self, 'FWorkspace')\n self.contents = workspace\n self.label = label\n\n def Label(self):\n return self.label\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self\n workspacePanel.SetupWorkspaceControls()\n\n\nclass DashboardTabNode(NodeBase):\n\n def __init__(self, tabContent):\n NodeBase.__init__(self, 'WindowSwitch')\n self.tabContent = tabContent\n self.contents = self.Contents()\n self.userSettings = self.Settings()\n\n def Label(self):\n label = self.tabContent.caption.encode('utf-8')\n return label or '<Dashboard>'\n\n def Contents(self):\n contents = AppWorkspace.DashboardContent()\n contents.ParseFromString(self.tabContent.contents)\n return contents\n\n def Settings(self):\n userSettings = AppWorkspace.DashboardSettings()\n userSettings.ParseFromString(self.tabContent.userSettings)\n return userSettings\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self\n workspacePanel.SetupDashboardTabControls()\n\n\nclass WorkbenchTabNode(NodeBase):\n\n def __init__(self, tabContent):\n NodeBase.__init__(self, 'Layout')\n self.tabContent = tabContent\n self.contents = self.Contents()\n self.userSettings = self.Settings()\n\n def Label(self):\n label = self.tabContent.caption.encode('utf-8')\n return label or '<Workbench>'\n\n def Contents(self):\n contents = AppWorkspace.WorkbenchContent()\n contents.ParseFromString(self.tabContent.contents)\n return contents\n\n def Settings(self):\n userSettings = AppWorkspace.WorkbenchSettings()\n userSettings.ParseFromString(self.tabContent.userSettings)\n return userSettings\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.tabContent\n workspacePanel.SetupWorkbenchTabControls()\n\n\nclass DashboardPartNode(NodeBase):\n\n def __init__(self, part, settings, label=None):\n NodeBase.__init__(self, 'FExtension')\n self.part = part\n self.settings = settings\n\n def Label(self):\n v = self.part.view\n label = v.caption if v.HasField('caption'\n ) and v.caption else v.viewName\n return label.encode('utf-8') or '<Part>'\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self\n workspacePanel.SetupDashboardPartControls()\n\n\nclass DockPartNode(NodeBase):\n\n def __init__(self, part):\n NodeBase.__init__(self, 'FExtension')\n self.part = part\n\n def Label(self):\n v = self.part.view\n label = v.caption if v.HasField('caption'\n ) and v.caption else v.viewName\n return label.encode('utf-8') or '<Part>'\n\n def GetButtons(self):\n return self.part.selectionActionButtons\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.part\n workspacePanel.SetupDockSectionPartControls()\n\n\nclass MainViewNode(NodeBase):\n\n def __init__(self, view):\n NodeBase.__init__(self, 'DisplayTabs')\n self.view = view\n\n def Label(self):\n return 'Main View'\n\n def OnSelection(self, treePanel):\n workspace = treePanel.parent.workspacePanel\n workspace.nodeData = self.view\n workspace.SetupMainViewControls()\n\n\nclass DockSectionNode(NodeBase):\n\n def __init__(self, label, icon, contents, settings):\n 
NodeBase.__init__(self, icon)\n self.label = label\n self.contents = contents\n self.settings = settings\n\n def Label(self):\n return self.label\n\n def OnSelection(self, treePanel):\n workspace = treePanel.parent.workspacePanel\n workspace.nodeData = self\n workspace.SetupDockSectionControls()\n\n\nclass ToolbarNode(NodeBase):\n\n def __init__(self, toolbar):\n NodeBase.__init__(self, 'InstrumentAppBkg+SettingsOverlay')\n self.toolbar = toolbar\n\n def Label(self):\n return 'Toolbar'\n\n def GetButtons(self):\n return self.toolbar.buttons\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.toolbar.quickOpen\n workspacePanel.SetupToolbarControls()\n\n\nclass ButtonNode(NodeBase):\n\n def __init__(self, button, isDockPartNode):\n NodeBase.__init__(self, 'TradeEntryApp')\n self.button = button\n self.isDockPartNode = isDockPartNode\n\n def Label(self):\n label = self.button.HasField('label') and self.button.label.encode(\n 'utf-8')\n return label or '<Buttons>'\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.button\n workspacePanel.SetupButtonControls(self.isDockPartNode)\n",
"step-5": "\"\"\" Compiled: 2020-09-18 10:38:52 \"\"\"\n\n#__src_file__ = \"extensions/AppWorkspaceTools/etc/FAppWorkspaceDesignerNodes.py\"\n\"\"\" Compiled: 2018-06-07 17:06:19 \"\"\"\n\n#__src_file__ = \"extensions/AppWorkspaceTools/etc/FAppWorkspaceDesignerNodes.py\"\nimport acm\nimport FUxCore\nimport Contracts_AppConfig_Messages_AppWorkspace as AppWorkspace\n\nclass NodeBase():\n def __init__(self, icon=''):\n self.icon = icon\n \n def Label(self):\n raise NotImplementedError('Label')\n \n def Icon(self):\n return self.icon\n\nclass WorkspaceNode(NodeBase):\n def __init__(self, workspace, label):\n NodeBase.__init__(self, 'FWorkspace')\n self.contents = workspace\n self.label = label\n \n def Label(self):\n return self.label\n \n def OnSelection(self, treePanel):\n # TODO Don't navigate to siblings, go through parent\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self\n workspacePanel.SetupWorkspaceControls()\n \nclass DashboardTabNode(NodeBase):\n def __init__(self, tabContent):\n NodeBase.__init__(self, 'WindowSwitch')\n self.tabContent = tabContent\n self.contents = self.Contents()\n self.userSettings = self.Settings()\n \n def Label(self):\n label = self.tabContent.caption.encode('utf-8')\n return label or '<Dashboard>'\n \n def Contents(self):\n contents = AppWorkspace.DashboardContent()\n contents.ParseFromString(self.tabContent.contents)\n return contents\n \n def Settings(self):\n userSettings = AppWorkspace.DashboardSettings()\n userSettings.ParseFromString(self.tabContent.userSettings)\n return userSettings\n \n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self\n workspacePanel.SetupDashboardTabControls()\n \nclass WorkbenchTabNode(NodeBase):\n def __init__(self, tabContent):\n NodeBase.__init__(self, 'Layout')\n self.tabContent = tabContent\n self.contents = self.Contents()\n self.userSettings = self.Settings()\n \n def Label(self):\n label = self.tabContent.caption.encode('utf-8') \n return label or '<Workbench>'\n \n def Contents(self):\n contents = AppWorkspace.WorkbenchContent()\n contents.ParseFromString(self.tabContent.contents)\n return contents\n \n def Settings(self):\n userSettings = AppWorkspace.WorkbenchSettings()\n userSettings.ParseFromString(self.tabContent.userSettings)\n return userSettings\n \n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.tabContent\n workspacePanel.SetupWorkbenchTabControls()\n \nclass DashboardPartNode(NodeBase):\n def __init__(self, part, settings, label=None):\n NodeBase.__init__(self, 'FExtension')\n self.part = part \n self.settings = settings\n \n def Label(self):\n v = self.part.view\n label = v.caption if v.HasField('caption') and v.caption else v.viewName\n return label.encode('utf-8') or '<Part>'\n \n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self\n workspacePanel.SetupDashboardPartControls()\n\nclass DockPartNode(NodeBase):\n def __init__(self, part):\n NodeBase.__init__(self, 'FExtension')\n self.part = part\n \n def Label(self):\n v = self.part.view\n label = v.caption if v.HasField('caption') and v.caption else v.viewName\n return label.encode('utf-8') or '<Part>'\n\n def GetButtons(self):\n return self.part.selectionActionButtons\n \n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.part\n 
workspacePanel.SetupDockSectionPartControls()\n\nclass MainViewNode(NodeBase):\n def __init__(self, view):\n NodeBase.__init__(self, 'DisplayTabs')\n self.view = view\n \n def Label(self):\n return 'Main View'\n \n def OnSelection(self, treePanel):\n workspace = treePanel.parent.workspacePanel\n workspace.nodeData = self.view\n workspace.SetupMainViewControls()\n \nclass DockSectionNode(NodeBase):\n def __init__(self, label, icon, contents, settings):\n NodeBase.__init__(self, icon)\n self.label = label\n self.contents = contents\n self.settings = settings\n \n def Label(self):\n return self.label\n \n def OnSelection(self, treePanel):\n workspace = treePanel.parent.workspacePanel\n workspace.nodeData = self\n workspace.SetupDockSectionControls()\n \nclass ToolbarNode(NodeBase):\n def __init__(self, toolbar):\n NodeBase.__init__(self, 'InstrumentAppBkg+SettingsOverlay')\n self.toolbar = toolbar\n \n def Label(self):\n return 'Toolbar'\n\n def GetButtons(self):\n return self.toolbar.buttons \n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.toolbar.quickOpen\n workspacePanel.SetupToolbarControls()\n \nclass ButtonNode(NodeBase):\n def __init__(self, button, isDockPartNode):\n NodeBase.__init__(self, 'TradeEntryApp')\n self.button = button\n self.isDockPartNode = isDockPartNode\n \n def Label(self):\n label = self.button.HasField('label') and \\\n self.button.label.encode('utf-8')\n return label or '<Buttons>'\n\n def OnSelection(self, treePanel):\n workspacePanel = treePanel.parent.workspacePanel\n workspacePanel.nodeData = self.button\n workspacePanel.SetupButtonControls(self.isDockPartNode)",
"step-ids": [
15,
37,
38,
44,
48
]
}
|
[
15,
37,
38,
44,
48
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution(object):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution(object):
def findPaths(self, m, n, N, i, j):
"""
:type m: int
:type n: int
:type N: int
:type i: int
:type j: int
:rtype: int
"""
MOD = 10 ** 9 + 7
        dz = list(zip((1, 0, -1, 0), (0, 1, 0, -1)))
dp = [([0] * n) for x in range(m)]
dp[i][j] = 1
ans = 0
for _ in range(N):
ndp = [([0] * n) for x in range(m)]
for x in range(m):
for y in range(n):
for dx, dy in dz:
nx, ny = x + dx, y + dy
if 0 <= nx < m and 0 <= ny < n:
ndp[nx][ny] = (ndp[nx][ny] + dp[x][y]) % MOD
else:
ans = (ans + dp[x][y]) % MOD
dp = ndp
return ans
<|reserved_special_token_1|>
class Solution(object):
def findPaths(self, m, n, N, i, j):
"""
:type m: int
:type n: int
:type N: int
:type i: int
:type j: int
:rtype: int
"""
MOD = 10 ** 9 + 7
        dz = list(zip((1,0,-1,0),(0,1,0,-1)))
dp = [[0]* n for x in range(m)]
dp[i][j] = 1
ans = 0
for _ in range(N):
ndp = [[0] * n for x in range(m)]
for x in range(m):
for y in range(n):
for dx,dy in dz:
nx,ny = x + dx, y+dy
if 0 <= nx < m and 0 <= ny <n:
ndp[nx][ny]= (ndp[nx][ny]+dp[x][y])%MOD
else:
ans = (ans + dp[x][y])% MOD
dp = ndp
return ans
|
flexible
|
{
"blob_id": "ebbc79d6582f7d6139e0dcec6333b679bb86c63c",
"index": 1383,
"step-1": "<mask token>\n",
"step-2": "class Solution(object):\n <mask token>\n",
"step-3": "class Solution(object):\n\n def findPaths(self, m, n, N, i, j):\n \"\"\"\n :type m: int\n :type n: int\n :type N: int\n :type i: int\n :type j: int\n :rtype: int\n \"\"\"\n MOD = 10 ** 9 + 7\n dz = zip((1, 0, -1, 0), (0, 1, 0, -1))\n dp = [([0] * n) for x in range(m)]\n dp[i][j] = 1\n ans = 0\n for _ in range(N):\n ndp = [([0] * n) for x in range(m)]\n for x in range(m):\n for y in range(n):\n for dx, dy in dz:\n nx, ny = x + dx, y + dy\n if 0 <= nx < m and 0 <= ny < n:\n ndp[nx][ny] = (ndp[nx][ny] + dp[x][y]) % MOD\n else:\n ans = (ans + dp[x][y]) % MOD\n dp = ndp\n return ans\n",
"step-4": "class Solution(object):\n def findPaths(self, m, n, N, i, j):\n \"\"\"\n :type m: int\n :type n: int\n :type N: int\n :type i: int\n :type j: int\n :rtype: int\n \"\"\"\n MOD = 10 ** 9 + 7\n dz = zip((1,0,-1,0),(0,1,0,-1))\n dp = [[0]* n for x in range(m)]\n dp[i][j] = 1\n ans = 0\n for _ in range(N):\n ndp = [[0] * n for x in range(m)]\n for x in range(m):\n for y in range(n):\n for dx,dy in dz:\n nx,ny = x + dx, y+dy\n if 0 <= nx < m and 0 <= ny <n:\n ndp[nx][ny]= (ndp[nx][ny]+dp[x][y])%MOD\n else:\n ans = (ans + dp[x][y])% MOD\n \n dp = ndp\n \n return ans\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
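The solution above is a forward dynamic program: dp[x][y] counts the paths of the current length that end at (x, y), and every step that would leave the m x n board is accumulated into the answer modulo 10^9 + 7. Note the direction tuples are materialized with list(zip(...)) so the nested loops can reuse them for every cell; a bare zip() iterator would be exhausted after the first pass under Python 3. A usage sketch against the well-known 2x2 case:

# On a 2x2 board, starting at (0, 0) with at most 2 moves,
# there are 6 distinct ways to step off the board.
print(Solution().findPaths(2, 2, 2, 0, 0))  # expected: 6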
# Create an 8x8 checkerboard matrix
import numpy as np
vector = np.zeros((8,8))
vector[1::2,::2]=1
vector[::2,1::2]=1
print(vector)
'''
Output
[[0. 1. 0. 1. 0. 1. 0. 1.]
[1. 0. 1. 0. 1. 0. 1. 0.]
[0. 1. 0. 1. 0. 1. 0. 1.]
[1. 0. 1. 0. 1. 0. 1. 0.]
[0. 1. 0. 1. 0. 1. 0. 1.]
[1. 0. 1. 0. 1. 0. 1. 0.]
[0. 1. 0. 1. 0. 1. 0. 1.]
[1. 0. 1. 0. 1. 0. 1. 0.]]
'''
|
normal
|
{
"blob_id": "10d3ee459a296c26429659a202833a9570cf9454",
"index": 9639,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(vector)\n<mask token>\n",
"step-3": "<mask token>\nvector = np.zeros((8, 8))\nvector[1::2, ::2] = 1\nvector[::2, 1::2] = 1\nprint(vector)\n<mask token>\n",
"step-4": "import numpy as np\nvector = np.zeros((8, 8))\nvector[1::2, ::2] = 1\nvector[::2, 1::2] = 1\nprint(vector)\n<mask token>\n",
"step-5": "#Create a 3x3 identity matrix\n\n\nimport numpy as np\n\nvector = np.zeros((8,8))\nvector[1::2,::2]=1\nvector[::2,1::2]=1\nprint(vector)\n\n'''\nOutput\n\n[[0. 1. 0. 1. 0. 1. 0. 1.]\n [1. 0. 1. 0. 1. 0. 1. 0.]\n [0. 1. 0. 1. 0. 1. 0. 1.]\n [1. 0. 1. 0. 1. 0. 1. 0.]\n [0. 1. 0. 1. 0. 1. 0. 1.]\n [1. 0. 1. 0. 1. 0. 1. 0.]\n [0. 1. 0. 1. 0. 1. 0. 1.]\n [1. 0. 1. 0. 1. 0. 1. 0.]]\n\n'''",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
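The slice assignments above set every other cell on alternating rows; the same board also falls out of index parity in a single expression. An equivalent sketch:

import numpy as np

# (row + column) parity reproduces the 8x8 checkerboard above,
# as integers rather than the floats printed by the record.
board = np.indices((8, 8)).sum(axis=0) % 2
print(board)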
<|reserved_special_token_0|>
def extract(input_data: str) ->tuple:
"""take input data and return the appropriate data structure"""
sheet = set()
folds = list()
s_instr, f_instr = input_data.split('\n\n')
for line in s_instr.split('\n'):
sheet.add(tuple(map(int, line.split(','))))
for line in f_instr.split('\n'):
equal_pos = line.index('=')
folds.append((line[equal_pos - 1], int(line[equal_pos + 1:])))
return sheet, folds
def fold(sheet: set, direction: str, axis: int):
folded = set()
for x, y in sheet:
if direction == 'x' and x > axis:
x = 2 * axis - x
elif direction == 'y' and y > axis:
y = 2 * axis - y
folded.add((x, y))
return folded
def part1(entries: tuple) ->int:
"""part1 solver take the entries and return the part1 solution"""
direction, axis = entries[1][0]
sheet = fold(entries[0], direction, axis)
return len(sheet)
def part2(entries: tuple) ->str:
"""part2 solver take the entries and return the part2 solution"""
sheet = entries[0]
fold_instructions = entries[1]
for direction, axis in fold_instructions:
sheet = fold(sheet, direction, axis)
max_x = max(p[0] for p in sheet)
max_y = max(p[1] for p in sheet)
out = ''
for y in range(max_y + 1):
for x in range(max_x + 1):
out += '#' if (x, y) in sheet else ' '
out += '\n'
return out
def test_input_day_13():
"""pytest testing function"""
entries = extract(TEST_INPUT)
assert part1(entries) == 17
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def read_input(input_path: str) ->str:
"""take input file path and return a str with the file's content"""
with open(input_path, 'r') as input_file:
input_data = input_file.read().strip()
return input_data
def extract(input_data: str) ->tuple:
"""take input data and return the appropriate data structure"""
sheet = set()
folds = list()
s_instr, f_instr = input_data.split('\n\n')
for line in s_instr.split('\n'):
sheet.add(tuple(map(int, line.split(','))))
for line in f_instr.split('\n'):
equal_pos = line.index('=')
folds.append((line[equal_pos - 1], int(line[equal_pos + 1:])))
return sheet, folds
def fold(sheet: set, direction: str, axis: int):
folded = set()
for x, y in sheet:
if direction == 'x' and x > axis:
x = 2 * axis - x
elif direction == 'y' and y > axis:
y = 2 * axis - y
folded.add((x, y))
return folded
def part1(entries: tuple) ->int:
"""part1 solver take the entries and return the part1 solution"""
direction, axis = entries[1][0]
sheet = fold(entries[0], direction, axis)
return len(sheet)
def part2(entries: tuple) ->str:
"""part2 solver take the entries and return the part2 solution"""
sheet = entries[0]
fold_instructions = entries[1]
for direction, axis in fold_instructions:
sheet = fold(sheet, direction, axis)
max_x = max(p[0] for p in sheet)
max_y = max(p[1] for p in sheet)
out = ''
for y in range(max_y + 1):
for x in range(max_x + 1):
out += '#' if (x, y) in sheet else ' '
out += '\n'
return out
def test_input_day_13():
"""pytest testing function"""
entries = extract(TEST_INPUT)
assert part1(entries) == 17
def test_bench_day_13(benchmark):
"""pytest-benchmark function"""
benchmark(main)
def main():
"""main function"""
input_path = str(pathlib.Path(__file__).resolve().parent.parent
) + '/inputs/' + str(pathlib.Path(__file__).stem)
start_time = time.time()
input_data = read_input(input_path)
entries = extract(input_data)
print('Part 1: %d' % part1(entries))
print('Part 2:\n%s' % part2(entries))
end_time = time.time()
print('Execution time: %f' % (end_time - start_time))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
TEST_INPUT = """6,10
0,14
9,10
0,3
10,4
4,11
6,0
6,12
4,1
0,13
10,12
3,4
3,0
8,4
1,10
2,14
8,10
9,0

fold along y=7
fold along x=5"""
def read_input(input_path: str) ->str:
"""take input file path and return a str with the file's content"""
with open(input_path, 'r') as input_file:
input_data = input_file.read().strip()
return input_data
def extract(input_data: str) ->tuple:
"""take input data and return the appropriate data structure"""
sheet = set()
folds = list()
s_instr, f_instr = input_data.split('\n\n')
for line in s_instr.split('\n'):
sheet.add(tuple(map(int, line.split(','))))
for line in f_instr.split('\n'):
equal_pos = line.index('=')
folds.append((line[equal_pos - 1], int(line[equal_pos + 1:])))
return sheet, folds
def fold(sheet: set, direction: str, axis: int):
folded = set()
for x, y in sheet:
if direction == 'x' and x > axis:
x = 2 * axis - x
elif direction == 'y' and y > axis:
y = 2 * axis - y
folded.add((x, y))
return folded
def part1(entries: tuple) ->int:
"""part1 solver take the entries and return the part1 solution"""
direction, axis = entries[1][0]
sheet = fold(entries[0], direction, axis)
return len(sheet)
def part2(entries: tuple) ->str:
"""part2 solver take the entries and return the part2 solution"""
sheet = entries[0]
fold_instructions = entries[1]
for direction, axis in fold_instructions:
sheet = fold(sheet, direction, axis)
max_x = max(p[0] for p in sheet)
max_y = max(p[1] for p in sheet)
out = ''
for y in range(max_y + 1):
for x in range(max_x + 1):
out += '#' if (x, y) in sheet else ' '
out += '\n'
return out
def test_input_day_13():
"""pytest testing function"""
entries = extract(TEST_INPUT)
assert part1(entries) == 17
def test_bench_day_13(benchmark):
"""pytest-benchmark function"""
benchmark(main)
def main():
"""main function"""
input_path = str(pathlib.Path(__file__).resolve().parent.parent
) + '/inputs/' + str(pathlib.Path(__file__).stem)
start_time = time.time()
input_data = read_input(input_path)
entries = extract(input_data)
print('Part 1: %d' % part1(entries))
print('Part 2:\n%s' % part2(entries))
end_time = time.time()
print('Execution time: %f' % (end_time - start_time))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import pathlib
import time
TEST_INPUT = """6,10
0,14
9,10
0,3
10,4
4,11
6,0
6,12
4,1
0,13
10,12
3,4
3,0
8,4
1,10
2,14
8,10
9,0

fold along y=7
fold along x=5"""
def read_input(input_path: str) ->str:
"""take input file path and return a str with the file's content"""
with open(input_path, 'r') as input_file:
input_data = input_file.read().strip()
return input_data
def extract(input_data: str) ->tuple:
"""take input data and return the appropriate data structure"""
sheet = set()
folds = list()
s_instr, f_instr = input_data.split('\n\n')
for line in s_instr.split('\n'):
sheet.add(tuple(map(int, line.split(','))))
for line in f_instr.split('\n'):
equal_pos = line.index('=')
folds.append((line[equal_pos - 1], int(line[equal_pos + 1:])))
return sheet, folds
def fold(sheet: set, direction: str, axis: int):
folded = set()
for x, y in sheet:
if direction == 'x' and x > axis:
x = 2 * axis - x
elif direction == 'y' and y > axis:
y = 2 * axis - y
folded.add((x, y))
return folded
def part1(entries: tuple) ->int:
"""part1 solver take the entries and return the part1 solution"""
direction, axis = entries[1][0]
sheet = fold(entries[0], direction, axis)
return len(sheet)
def part2(entries: tuple) ->str:
"""part2 solver take the entries and return the part2 solution"""
sheet = entries[0]
fold_instructions = entries[1]
for direction, axis in fold_instructions:
sheet = fold(sheet, direction, axis)
max_x = max(p[0] for p in sheet)
max_y = max(p[1] for p in sheet)
out = ''
for y in range(max_y + 1):
for x in range(max_x + 1):
out += '#' if (x, y) in sheet else ' '
out += '\n'
return out
def test_input_day_13():
"""pytest testing function"""
entries = extract(TEST_INPUT)
assert part1(entries) == 17
def test_bench_day_13(benchmark):
"""pytest-benchmark function"""
benchmark(main)
def main():
"""main function"""
input_path = str(pathlib.Path(__file__).resolve().parent.parent
) + '/inputs/' + str(pathlib.Path(__file__).stem)
start_time = time.time()
input_data = read_input(input_path)
entries = extract(input_data)
print('Part 1: %d' % part1(entries))
print('Part 2:\n%s' % part2(entries))
end_time = time.time()
print('Execution time: %f' % (end_time - start_time))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
"""AOC Day 13"""
import pathlib
import time
TEST_INPUT = """6,10
0,14
9,10
0,3
10,4
4,11
6,0
6,12
4,1
0,13
10,12
3,4
3,0
8,4
1,10
2,14
8,10
9,0

fold along y=7
fold along x=5"""
def read_input(input_path: str) -> str:
"""take input file path and return a str with the file's content"""
with open(input_path, 'r') as input_file:
input_data = input_file.read().strip()
return input_data
def extract(input_data: str) -> tuple:
"""take input data and return the appropriate data structure"""
sheet = set()
folds = list()
s_instr, f_instr = input_data.split('\n\n')
for line in s_instr.split('\n'):
sheet.add(tuple(map(int, line.split(','))))
for line in f_instr.split('\n'):
equal_pos = line.index('=')
folds.append((line[equal_pos-1], int(line[equal_pos+1:])))
return (sheet, folds)
def fold(sheet: set, direction: str, axis: int):
folded = set()
for x, y in sheet:
if direction == 'x' and x > axis:
x = 2 * axis - x
elif direction == 'y' and y > axis:
y = 2 * axis - y
folded.add((x, y))
return folded
def part1(entries: tuple) -> int:
"""part1 solver take the entries and return the part1 solution"""
direction, axis = entries[1][0]
sheet = fold(entries[0], direction, axis)
return len(sheet)
def part2(entries: tuple) -> str:
"""part2 solver take the entries and return the part2 solution"""
sheet = entries[0]
fold_instructions = entries[1]
for direction, axis in fold_instructions:
sheet = fold(sheet, direction, axis)
max_x = max(p[0] for p in sheet)
max_y = max(p[1] for p in sheet)
out = ''
for y in range(max_y + 1):
for x in range(max_x + 1):
out += '#' if (x, y) in sheet else ' '
out += '\n'
return out
def test_input_day_13():
"""pytest testing function"""
entries = extract(TEST_INPUT)
assert part1(entries) == 17
def test_bench_day_13(benchmark):
"""pytest-benchmark function"""
benchmark(main)
def main():
"""main function"""
input_path = str(pathlib.Path(__file__).resolve().parent.parent) + "/inputs/" + str(pathlib.Path(__file__).stem)
start_time = time.time()
input_data = read_input(input_path)
entries = extract(input_data)
print("Part 1: %d" % part1(entries))
print("Part 2:\n%s" % part2(entries))
end_time = time.time()
print("Execution time: %f" % (end_time-start_time))
if __name__ == "__main__":
main()
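
fold() implements a reflection: any coordinate past the fold axis maps to 2 * axis - coordinate, and the set union deduplicates dots that land on top of each other. A quick worked check against the sample fold along y=7:

# (0, 14) reflects to (0, 0); (9, 10) reflects to (9, 4); (6, 0) is unchanged.
print(fold({(0, 14), (9, 10), (6, 0)}, 'y', 7))
# -> {(0, 0), (9, 4), (6, 0)}  (printed in arbitrary set order)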
|
flexible
|
{
"blob_id": "bda28e5a0cb8a3dddea58c9c59a165b31274ac03",
"index": 5225,
"step-1": "<mask token>\n\n\ndef extract(input_data: str) ->tuple:\n \"\"\"take input data and return the appropriate data structure\"\"\"\n sheet = set()\n folds = list()\n s_instr, f_instr = input_data.split('\\n\\n')\n for line in s_instr.split('\\n'):\n sheet.add(tuple(map(int, line.split(','))))\n for line in f_instr.split('\\n'):\n equal_pos = line.index('=')\n folds.append((line[equal_pos - 1], int(line[equal_pos + 1:])))\n return sheet, folds\n\n\ndef fold(sheet: set, direction: str, axis: int):\n folded = set()\n for x, y in sheet:\n if direction == 'x' and x > axis:\n x = 2 * axis - x\n elif direction == 'y' and y > axis:\n y = 2 * axis - y\n folded.add((x, y))\n return folded\n\n\ndef part1(entries: tuple) ->int:\n \"\"\"part1 solver take the entries and return the part1 solution\"\"\"\n direction, axis = entries[1][0]\n sheet = fold(entries[0], direction, axis)\n return len(sheet)\n\n\ndef part2(entries: tuple) ->str:\n \"\"\"part2 solver take the entries and return the part2 solution\"\"\"\n sheet = entries[0]\n fold_instructions = entries[1]\n for direction, axis in fold_instructions:\n sheet = fold(sheet, direction, axis)\n max_x = max(p[0] for p in sheet)\n max_y = max(p[1] for p in sheet)\n out = ''\n for y in range(max_y + 1):\n for x in range(max_x + 1):\n out += '#' if (x, y) in sheet else ' '\n out += '\\n'\n return out\n\n\ndef test_input_day_13():\n \"\"\"pytest testing function\"\"\"\n entries = extract(TEST_INPUT)\n assert part1(entries) == 17\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef read_input(input_path: str) ->str:\n \"\"\"take input file path and return a str with the file's content\"\"\"\n with open(input_path, 'r') as input_file:\n input_data = input_file.read().strip()\n return input_data\n\n\ndef extract(input_data: str) ->tuple:\n \"\"\"take input data and return the appropriate data structure\"\"\"\n sheet = set()\n folds = list()\n s_instr, f_instr = input_data.split('\\n\\n')\n for line in s_instr.split('\\n'):\n sheet.add(tuple(map(int, line.split(','))))\n for line in f_instr.split('\\n'):\n equal_pos = line.index('=')\n folds.append((line[equal_pos - 1], int(line[equal_pos + 1:])))\n return sheet, folds\n\n\ndef fold(sheet: set, direction: str, axis: int):\n folded = set()\n for x, y in sheet:\n if direction == 'x' and x > axis:\n x = 2 * axis - x\n elif direction == 'y' and y > axis:\n y = 2 * axis - y\n folded.add((x, y))\n return folded\n\n\ndef part1(entries: tuple) ->int:\n \"\"\"part1 solver take the entries and return the part1 solution\"\"\"\n direction, axis = entries[1][0]\n sheet = fold(entries[0], direction, axis)\n return len(sheet)\n\n\ndef part2(entries: tuple) ->str:\n \"\"\"part2 solver take the entries and return the part2 solution\"\"\"\n sheet = entries[0]\n fold_instructions = entries[1]\n for direction, axis in fold_instructions:\n sheet = fold(sheet, direction, axis)\n max_x = max(p[0] for p in sheet)\n max_y = max(p[1] for p in sheet)\n out = ''\n for y in range(max_y + 1):\n for x in range(max_x + 1):\n out += '#' if (x, y) in sheet else ' '\n out += '\\n'\n return out\n\n\ndef test_input_day_13():\n \"\"\"pytest testing function\"\"\"\n entries = extract(TEST_INPUT)\n assert part1(entries) == 17\n\n\ndef test_bench_day_13(benchmark):\n \"\"\"pytest-benchmark function\"\"\"\n benchmark(main)\n\n\ndef main():\n \"\"\"main function\"\"\"\n input_path = str(pathlib.Path(__file__).resolve().parent.parent\n ) + '/inputs/' + str(pathlib.Path(__file__).stem)\n start_time = time.time()\n input_data = read_input(input_path)\n entries = extract(input_data)\n print('Part 1: %d' % part1(entries))\n print('Part 2:\\n%s' % part2(entries))\n end_time = time.time()\n print('Execution time: %f' % (end_time - start_time))\n\n\n<mask token>\n",
"step-3": "<mask token>\nTEST_INPUT = \"\"\"6,10\n0,14\n9,10\n0,3\n10,4\n4,11\n6,0\n6,12\n4,1\n0,13\n10,12\n3,4\n3,0\n8,4\n1,10\n2,14\n8,10\n9,0\n\nfold along y=7\nfold along x=5\"\"\"\n\n\ndef read_input(input_path: str) ->str:\n \"\"\"take input file path and return a str with the file's content\"\"\"\n with open(input_path, 'r') as input_file:\n input_data = input_file.read().strip()\n return input_data\n\n\ndef extract(input_data: str) ->tuple:\n \"\"\"take input data and return the appropriate data structure\"\"\"\n sheet = set()\n folds = list()\n s_instr, f_instr = input_data.split('\\n\\n')\n for line in s_instr.split('\\n'):\n sheet.add(tuple(map(int, line.split(','))))\n for line in f_instr.split('\\n'):\n equal_pos = line.index('=')\n folds.append((line[equal_pos - 1], int(line[equal_pos + 1:])))\n return sheet, folds\n\n\ndef fold(sheet: set, direction: str, axis: int):\n folded = set()\n for x, y in sheet:\n if direction == 'x' and x > axis:\n x = 2 * axis - x\n elif direction == 'y' and y > axis:\n y = 2 * axis - y\n folded.add((x, y))\n return folded\n\n\ndef part1(entries: tuple) ->int:\n \"\"\"part1 solver take the entries and return the part1 solution\"\"\"\n direction, axis = entries[1][0]\n sheet = fold(entries[0], direction, axis)\n return len(sheet)\n\n\ndef part2(entries: tuple) ->str:\n \"\"\"part2 solver take the entries and return the part2 solution\"\"\"\n sheet = entries[0]\n fold_instructions = entries[1]\n for direction, axis in fold_instructions:\n sheet = fold(sheet, direction, axis)\n max_x = max(p[0] for p in sheet)\n max_y = max(p[1] for p in sheet)\n out = ''\n for y in range(max_y + 1):\n for x in range(max_x + 1):\n out += '#' if (x, y) in sheet else ' '\n out += '\\n'\n return out\n\n\ndef test_input_day_13():\n \"\"\"pytest testing function\"\"\"\n entries = extract(TEST_INPUT)\n assert part1(entries) == 17\n\n\ndef test_bench_day_13(benchmark):\n \"\"\"pytest-benchmark function\"\"\"\n benchmark(main)\n\n\ndef main():\n \"\"\"main function\"\"\"\n input_path = str(pathlib.Path(__file__).resolve().parent.parent\n ) + '/inputs/' + str(pathlib.Path(__file__).stem)\n start_time = time.time()\n input_data = read_input(input_path)\n entries = extract(input_data)\n print('Part 1: %d' % part1(entries))\n print('Part 2:\\n%s' % part2(entries))\n end_time = time.time()\n print('Execution time: %f' % (end_time - start_time))\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nimport pathlib\nimport time\nTEST_INPUT = \"\"\"6,10\n0,14\n9,10\n0,3\n10,4\n4,11\n6,0\n6,12\n4,1\n0,13\n10,12\n3,4\n3,0\n8,4\n1,10\n2,14\n8,10\n9,0\n\nfold along y=7\nfold along x=5\"\"\"\n\n\ndef read_input(input_path: str) ->str:\n \"\"\"take input file path and return a str with the file's content\"\"\"\n with open(input_path, 'r') as input_file:\n input_data = input_file.read().strip()\n return input_data\n\n\ndef extract(input_data: str) ->tuple:\n \"\"\"take input data and return the appropriate data structure\"\"\"\n sheet = set()\n folds = list()\n s_instr, f_instr = input_data.split('\\n\\n')\n for line in s_instr.split('\\n'):\n sheet.add(tuple(map(int, line.split(','))))\n for line in f_instr.split('\\n'):\n equal_pos = line.index('=')\n folds.append((line[equal_pos - 1], int(line[equal_pos + 1:])))\n return sheet, folds\n\n\ndef fold(sheet: set, direction: str, axis: int):\n folded = set()\n for x, y in sheet:\n if direction == 'x' and x > axis:\n x = 2 * axis - x\n elif direction == 'y' and y > axis:\n y = 2 * axis - y\n folded.add((x, y))\n return folded\n\n\ndef part1(entries: tuple) ->int:\n \"\"\"part1 solver take the entries and return the part1 solution\"\"\"\n direction, axis = entries[1][0]\n sheet = fold(entries[0], direction, axis)\n return len(sheet)\n\n\ndef part2(entries: tuple) ->str:\n \"\"\"part2 solver take the entries and return the part2 solution\"\"\"\n sheet = entries[0]\n fold_instructions = entries[1]\n for direction, axis in fold_instructions:\n sheet = fold(sheet, direction, axis)\n max_x = max(p[0] for p in sheet)\n max_y = max(p[1] for p in sheet)\n out = ''\n for y in range(max_y + 1):\n for x in range(max_x + 1):\n out += '#' if (x, y) in sheet else ' '\n out += '\\n'\n return out\n\n\ndef test_input_day_13():\n \"\"\"pytest testing function\"\"\"\n entries = extract(TEST_INPUT)\n assert part1(entries) == 17\n\n\ndef test_bench_day_13(benchmark):\n \"\"\"pytest-benchmark function\"\"\"\n benchmark(main)\n\n\ndef main():\n \"\"\"main function\"\"\"\n input_path = str(pathlib.Path(__file__).resolve().parent.parent\n ) + '/inputs/' + str(pathlib.Path(__file__).stem)\n start_time = time.time()\n input_data = read_input(input_path)\n entries = extract(input_data)\n print('Part 1: %d' % part1(entries))\n print('Part 2:\\n%s' % part2(entries))\n end_time = time.time()\n print('Execution time: %f' % (end_time - start_time))\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "\"\"\"AOC Day 13\"\"\"\n\nimport pathlib\nimport time\n\nTEST_INPUT = \"\"\"6,10\n0,14\n9,10\n0,3\n10,4\n4,11\n6,0\n6,12\n4,1\n0,13\n10,12\n3,4\n3,0\n8,4\n1,10\n2,14\n8,10\n9,0\n\nfold along y=7\nfold along x=5\"\"\"\n\ndef read_input(input_path: str) -> str:\n \"\"\"take input file path and return a str with the file's content\"\"\"\n with open(input_path, 'r') as input_file:\n input_data = input_file.read().strip()\n return input_data\n\ndef extract(input_data: str) -> tuple:\n \"\"\"take input data and return the appropriate data structure\"\"\"\n sheet = set()\n folds = list()\n s_instr, f_instr = input_data.split('\\n\\n')\n for line in s_instr.split('\\n'):\n sheet.add(tuple(map(int, line.split(','))))\n for line in f_instr.split('\\n'):\n equal_pos = line.index('=')\n folds.append((line[equal_pos-1], int(line[equal_pos+1:])))\n return (sheet, folds)\n\ndef fold(sheet: set, direction: str, axis: int):\n folded = set()\n\n for x, y in sheet:\n if direction == 'x' and x > axis:\n x = 2 * axis - x\n elif direction == 'y' and y > axis:\n y = 2 * axis - y\n\n folded.add((x, y))\n\n return folded\n\ndef part1(entries: tuple) -> int:\n \"\"\"part1 solver take the entries and return the part1 solution\"\"\"\n direction, axis = entries[1][0]\n sheet = fold(entries[0], direction, axis)\n return len(sheet)\n\ndef part2(entries: tuple) -> str:\n \"\"\"part2 solver take the entries and return the part2 solution\"\"\"\n sheet = entries[0]\n fold_instructions = entries[1]\n for direction, axis in fold_instructions:\n sheet = fold(sheet, direction, axis)\n \n max_x = max(p[0] for p in sheet)\n max_y = max(p[1] for p in sheet)\n out = ''\n for y in range(max_y + 1):\n for x in range(max_x + 1):\n out += '#' if (x, y) in sheet else ' '\n out += '\\n'\n return out\n\ndef test_input_day_13():\n \"\"\"pytest testing function\"\"\"\n entries = extract(TEST_INPUT)\n assert part1(entries) == 17\n\ndef test_bench_day_13(benchmark):\n \"\"\"pytest-benchmark function\"\"\"\n benchmark(main)\n\ndef main():\n \"\"\"main function\"\"\"\n input_path = str(pathlib.Path(__file__).resolve().parent.parent) + \"/inputs/\" + str(pathlib.Path(__file__).stem)\n start_time = time.time()\n input_data = read_input(input_path)\n entries = extract(input_data)\n print(\"Part 1: %d\" % part1(entries))\n print(\"Part 2:\\n%s\" % part2(entries))\n end_time = time.time()\n print(\"Execution time: %f\" % (end_time-start_time))\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
5,
8,
10,
11,
12
]
}
|
[
5,
8,
10,
11,
12
] |
# Generated by Django 3.2.3 on 2021-06-19 11:27
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='BillDetail',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(default=None, max_length=150)),
('CID', models.IntegerField(unique=True)),
('Units', models.PositiveIntegerField(default=None, validators=[django.core.validators.MaxValueValidator(100)])),
('Amount', models.PositiveIntegerField(default=None, validators=[django.core.validators.MaxValueValidator(100)])),
('BillGenerated', models.DateField(auto_now_add=True)),
],
),
]
|
normal
|
{
"blob_id": "b7a8e4105f1c1c532eaae27afae14e9a4f2ddfba",
"index": 2915,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='BillDetail', fields=[('id',\n models.BigAutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('Name', models.CharField(default=None,\n max_length=150)), ('CID', models.IntegerField(unique=True)), (\n 'Units', models.PositiveIntegerField(default=None, validators=[\n django.core.validators.MaxValueValidator(100)])), ('Amount', models\n .PositiveIntegerField(default=None, validators=[django.core.\n validators.MaxValueValidator(100)])), ('BillGenerated', models.\n DateField(auto_now_add=True))])]\n",
"step-4": "import django.core.validators\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='BillDetail', fields=[('id',\n models.BigAutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('Name', models.CharField(default=None,\n max_length=150)), ('CID', models.IntegerField(unique=True)), (\n 'Units', models.PositiveIntegerField(default=None, validators=[\n django.core.validators.MaxValueValidator(100)])), ('Amount', models\n .PositiveIntegerField(default=None, validators=[django.core.\n validators.MaxValueValidator(100)])), ('BillGenerated', models.\n DateField(auto_now_add=True))])]\n",
"step-5": "# Generated by Django 3.2.3 on 2021-06-19 11:27\r\n\r\nimport django.core.validators\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n initial = True\r\n\r\n dependencies = [\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='BillDetail',\r\n fields=[\r\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('Name', models.CharField(default=None, max_length=150)),\r\n ('CID', models.IntegerField(unique=True)),\r\n ('Units', models.PositiveIntegerField(default=None, validators=[django.core.validators.MaxValueValidator(100)])),\r\n ('Amount', models.PositiveIntegerField(default=None, validators=[django.core.validators.MaxValueValidator(100)])),\r\n ('BillGenerated', models.DateField(auto_now_add=True)),\r\n ],\r\n ),\r\n ]\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class NodeTests(unittest.TestCase):
<|reserved_special_token_0|>
def tearDown(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_Add(self):
x1 = core.Input(name='x1', shape=(2, 2))
x2 = core.Input(name='x2', shape=(2, 2))
n = nodes.Add(name='accumulate', num_inputs=2)
n.input_0.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_1.connect(x2)
self.assertIsNone(n.output.shape)
n.transform()
self.assertEqual(n.output.shape, (2, 2))
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
a = np.array([[3, -1], [3, 7]])
b = np.array([[1, 2], [3, 4]])
z = fx(a, b)[0]
np.testing.assert_equal(z, np.array([[4, 1], [6, 11]]))
@unittest.skip('Not fully implemented yet.')
def test_Bincount(self):
x1 = core.Input(name='x1', shape=(None,))
n = nodes.Bincount(name='counter', max_int=3)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=[n.counts])
a = np.array([3, 0, 3, 1])
np.testing.assert_equal(n.counts.value, np.array([0, 0, 0, 0]))
np.testing.assert_equal(fx(a)[0], np.array([1, 1, 0, 2]))
np.testing.assert_equal(fx(a)[0], np.array([2, 2, 0, 4]))
def test_Concatenate(self):
x1 = core.Input(name='x1', shape=(2, 2))
x2 = core.Input(name='x2', shape=(2, 2))
a = np.array([[3, -1], [3, 7]])
b = np.array([[1, 2], [3, 4]])
for axis in range(2):
n = nodes.Concatenate(name='concatenate', num_inputs=2, axis=axis)
n.input_0.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_1.connect(x2)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
z = fx(a, b)[0]
np.testing.assert_equal(z, np.concatenate([a, b], axis=axis))
def test_Stack(self):
x1 = core.Input(name='x1', shape=(2, 3))
x2 = core.Input(name='x2', shape=(2, 3))
a = np.arange(6).reshape(2, 3)
b = np.arange(6).reshape(2, 3) + 6
for axes in (None, (1, 2, 0), (2, 1, 0)):
n = nodes.Stack(name='stack', num_inputs=2, axes=axes)
n.input_1.connect(x2)
n.input_0.connect(x1)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
z = fx(a, b)[0]
expected = np.array([a, b])
if axes:
expected = np.transpose(expected, axes)
np.testing.assert_equal(z, expected)
def test_Dimshuffle(self):
x1 = core.Input(name='x1', shape=(2, 3))
a = np.zeros([2, 3])
axes = [('x', 0, 1), (0, 1, 'x'), (1, 'x', 0)]
shapes = [(1, 2, 3), (2, 3, 1), (3, 1, 2)]
for ax, shp in zip(axes, shapes):
n = nodes.Dimshuffle('dimshuffle', ax)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0].shape, shp)
def test_Slice(self):
x1 = core.Input(name='x1', shape=(2, 3))
a = np.arange(6).reshape(2, 3)
slices = [(None, 1), (0, None), (1, 0)]
ans = [a[:, 1], a[0, :], a[1, 0]]
for slc, ans in zip(slices, ans):
n = nodes.Slice('slice', slc)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], ans)
<|reserved_special_token_0|>
def test_Multiply(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [3, 7]])
for w, shp in zip([-1, a], [None, a.shape]):
n = nodes.Multiply(name='gain', weight_shape=shp)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], np.zeros_like(a))
n.weight.value = w
np.testing.assert_equal(fx(a)[0], w * a)
n = nodes.Multiply(name='gain', weight_shape=(1, 2), broadcast=[0])
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], np.zeros_like(a))
n.weight.value = a[0].reshape(1, -1)
np.testing.assert_equal(fx(a)[0], a * a[0].reshape(1, -1))
<|reserved_special_token_0|>
def test_Min(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = -1, np.array([3, -1]), np.array([-1, 4])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Min('min', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Sum(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 13, np.array([7, 6]), np.array([2, 11])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Sum('sum', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Mean(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 13 / 4.0, np.array([7, 6]) / 2.0, np.array([2, 11]) / 2.0
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Mean('mean', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_NormalizeDim(self):
x1 = core.Input(name='x1', shape=(1, 2, 3))
a = np.array([[[3, 1, -1], [4, 0, 7]]], dtype=np.float32)
expected = [np.sign(a), a / np.sqrt(np.array([25, 1, 50])).reshape(
1, 1, 3), a / np.sqrt(np.array([11, 65])).reshape(1, 2, 1)]
for axis, ans in enumerate(expected):
n = nodes.NormalizeDim('l2norm', axis=axis, mode='l2')
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_almost_equal(fx(a)[0], ans)
<|reserved_special_token_0|>
def test_SquaredEuclidean(self):
a1 = np.array([[3, -1], [4, 7]])
b1 = np.array([[1, -1], [4, 7]])
a2 = np.array([3, -1])
b2 = np.array([1, -1])
z1 = np.power(a1 - b1, 2.0).sum(axis=1)
z2 = np.power(a2 - b2, 2.0).sum()
for a, b, z in zip([a1, a2], [b1, b2], [z1, z2]):
x1 = core.Input(name='x1', shape=a.shape)
x2 = core.Input(name='x2', shape=b.shape)
n = nodes.SquaredEuclidean('sqeuclid')
n.input_a.connect(x1)
n.input_b.connect(x2)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, b)[0], z)
def test_Product(self):
a1 = np.array([[3, -1], [4, 7]])
b1 = np.array([[1, -1], [4, 7]])
a2 = np.array([3, -1])
b2 = np.array([1, -1])
for a, b in zip([a1, a2], [b1, b2]):
x1 = core.Input(name='x1', shape=a.shape)
x2 = core.Input(name='x2', shape=b.shape)
n = nodes.Product('product')
n.input_a.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_b.connect(x2)
self.assertTrue(n.is_ready())
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, b)[0], a * b)
<|reserved_special_token_0|>
def test_Affine_relu(self):
x1 = core.Input(name='x1', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
b = np.ones(3)
n = nodes.Affine(name='affine', input_shape=(None, 2), output_shape
=(None, 3), act_type='relu')
n.weights.value = w
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], __relu__(np.dot(a, w) + b))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_Conv3D_relu(self):
x1 = core.Input(name='x1', shape=(None, 1, 2, 3))
a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)
w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)
b = np.arange(3)
z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(
w, b)]])
a = a.reshape(1, 1, 2, 3)
z = z.reshape(1, 3, 1, 3)
n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),
weight_shape=(3, 1, 2, 1), act_type='relu')
n.weights.value = w.reshape(3, 1, 2, 1)
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], __relu__(z))
def test_Conv3D_dropout(self):
x1 = core.Input(name='x1', shape=(None, 1, 2, 3))
dropout = core.Input(name='dropout', shape=None)
a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)
w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)
b = np.arange(3)
z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(
w, b)]])
a = a.reshape(1, 1, 2, 3)
z = z.reshape(1, 3, 1, 3)
n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),
weight_shape=(3, 1, 2, 1), act_type='linear')
n.enable_dropout()
n.weights.value = w.reshape(3, 1, 2, 1)
n.bias.value = b
n.input.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.dropout.connect(dropout)
n.transform()
fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, 0.0)[0], z)
self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)
def test_RadialBasis(self):
x = core.Input(name='x', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
n = nodes.RadialBasis(name='radial', input_shape=x.shape,
output_shape=(None, 3))
n.weights.value = w.reshape(2, 3)
n.input.connect(x)
n.transform()
fx = util.compile(inputs=[x], outputs=n.outputs.values())
z = np.power(a.reshape(2, 2, 1) - w.reshape(1, 2, 3), 2.0).sum(axis=1)
np.testing.assert_equal(fx(a)[0], z)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class NodeTests(unittest.TestCase):
<|reserved_special_token_0|>
def tearDown(self):
pass
def test_Node(self):
pass
<|reserved_special_token_0|>
def test_Add(self):
x1 = core.Input(name='x1', shape=(2, 2))
x2 = core.Input(name='x2', shape=(2, 2))
n = nodes.Add(name='accumulate', num_inputs=2)
n.input_0.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_1.connect(x2)
self.assertIsNone(n.output.shape)
n.transform()
self.assertEqual(n.output.shape, (2, 2))
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
a = np.array([[3, -1], [3, 7]])
b = np.array([[1, 2], [3, 4]])
z = fx(a, b)[0]
np.testing.assert_equal(z, np.array([[4, 1], [6, 11]]))
@unittest.skip('Not fully implemented yet.')
def test_Bincount(self):
x1 = core.Input(name='x1', shape=(None,))
n = nodes.Bincount(name='counter', max_int=3)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=[n.counts])
a = np.array([3, 0, 3, 1])
np.testing.assert_equal(n.counts.value, np.array([0, 0, 0, 0]))
np.testing.assert_equal(fx(a)[0], np.array([1, 1, 0, 2]))
np.testing.assert_equal(fx(a)[0], np.array([2, 2, 0, 4]))
def test_Concatenate(self):
x1 = core.Input(name='x1', shape=(2, 2))
x2 = core.Input(name='x2', shape=(2, 2))
a = np.array([[3, -1], [3, 7]])
b = np.array([[1, 2], [3, 4]])
for axis in range(2):
n = nodes.Concatenate(name='concatenate', num_inputs=2, axis=axis)
n.input_0.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_1.connect(x2)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
z = fx(a, b)[0]
np.testing.assert_equal(z, np.concatenate([a, b], axis=axis))
def test_Stack(self):
x1 = core.Input(name='x1', shape=(2, 3))
x2 = core.Input(name='x2', shape=(2, 3))
a = np.arange(6).reshape(2, 3)
b = np.arange(6).reshape(2, 3) + 6
for axes in (None, (1, 2, 0), (2, 1, 0)):
n = nodes.Stack(name='stack', num_inputs=2, axes=axes)
n.input_1.connect(x2)
n.input_0.connect(x1)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
z = fx(a, b)[0]
expected = np.array([a, b])
if axes:
expected = np.transpose(expected, axes)
np.testing.assert_equal(z, expected)
def test_Dimshuffle(self):
x1 = core.Input(name='x1', shape=(2, 3))
a = np.zeros([2, 3])
axes = [('x', 0, 1), (0, 1, 'x'), (1, 'x', 0)]
shapes = [(1, 2, 3), (2, 3, 1), (3, 1, 2)]
for ax, shp in zip(axes, shapes):
n = nodes.Dimshuffle('dimshuffle', ax)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0].shape, shp)
def test_Slice(self):
x1 = core.Input(name='x1', shape=(2, 3))
a = np.arange(6).reshape(2, 3)
slices = [(None, 1), (0, None), (1, 0)]
ans = [a[:, 1], a[0, :], a[1, 0]]
for slc, ans in zip(slices, ans):
n = nodes.Slice('slice', slc)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], ans)
<|reserved_special_token_0|>
def test_Multiply(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [3, 7]])
for w, shp in zip([-1, a], [None, a.shape]):
n = nodes.Multiply(name='gain', weight_shape=shp)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], np.zeros_like(a))
n.weight.value = w
np.testing.assert_equal(fx(a)[0], w * a)
n = nodes.Multiply(name='gain', weight_shape=(1, 2), broadcast=[0])
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], np.zeros_like(a))
n.weight.value = a[0].reshape(1, -1)
np.testing.assert_equal(fx(a)[0], a * a[0].reshape(1, -1))
<|reserved_special_token_0|>
def test_Min(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = -1, np.array([3, -1]), np.array([-1, 4])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Min('min', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Sum(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 13, np.array([7, 6]), np.array([2, 11])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Sum('sum', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Mean(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 13 / 4.0, np.array([7, 6]) / 2.0, np.array([2, 11]) / 2.0
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Mean('mean', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_NormalizeDim(self):
x1 = core.Input(name='x1', shape=(1, 2, 3))
a = np.array([[[3, 1, -1], [4, 0, 7]]], dtype=np.float32)
expected = [np.sign(a), a / np.sqrt(np.array([25, 1, 50])).reshape(
1, 1, 3), a / np.sqrt(np.array([11, 65])).reshape(1, 2, 1)]
for axis, ans in enumerate(expected):
n = nodes.NormalizeDim('l2norm', axis=axis, mode='l2')
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_almost_equal(fx(a)[0], ans)
<|reserved_special_token_0|>
def test_SquaredEuclidean(self):
a1 = np.array([[3, -1], [4, 7]])
b1 = np.array([[1, -1], [4, 7]])
a2 = np.array([3, -1])
b2 = np.array([1, -1])
z1 = np.power(a1 - b1, 2.0).sum(axis=1)
z2 = np.power(a2 - b2, 2.0).sum()
for a, b, z in zip([a1, a2], [b1, b2], [z1, z2]):
x1 = core.Input(name='x1', shape=a.shape)
x2 = core.Input(name='x2', shape=b.shape)
n = nodes.SquaredEuclidean('sqeuclid')
n.input_a.connect(x1)
n.input_b.connect(x2)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, b)[0], z)
def test_Product(self):
a1 = np.array([[3, -1], [4, 7]])
b1 = np.array([[1, -1], [4, 7]])
a2 = np.array([3, -1])
b2 = np.array([1, -1])
for a, b in zip([a1, a2], [b1, b2]):
x1 = core.Input(name='x1', shape=a.shape)
x2 = core.Input(name='x2', shape=b.shape)
n = nodes.Product('product')
n.input_a.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_b.connect(x2)
self.assertTrue(n.is_ready())
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, b)[0], a * b)
<|reserved_special_token_0|>
def test_Affine_relu(self):
x1 = core.Input(name='x1', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
b = np.ones(3)
n = nodes.Affine(name='affine', input_shape=(None, 2), output_shape
=(None, 3), act_type='relu')
n.weights.value = w
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], __relu__(np.dot(a, w) + b))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_Conv3D_relu(self):
x1 = core.Input(name='x1', shape=(None, 1, 2, 3))
a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)
w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)
b = np.arange(3)
z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(
w, b)]])
a = a.reshape(1, 1, 2, 3)
z = z.reshape(1, 3, 1, 3)
n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),
weight_shape=(3, 1, 2, 1), act_type='relu')
n.weights.value = w.reshape(3, 1, 2, 1)
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], __relu__(z))
def test_Conv3D_dropout(self):
x1 = core.Input(name='x1', shape=(None, 1, 2, 3))
dropout = core.Input(name='dropout', shape=None)
a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)
w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)
b = np.arange(3)
z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(
w, b)]])
a = a.reshape(1, 1, 2, 3)
z = z.reshape(1, 3, 1, 3)
n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),
weight_shape=(3, 1, 2, 1), act_type='linear')
n.enable_dropout()
n.weights.value = w.reshape(3, 1, 2, 1)
n.bias.value = b
n.input.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.dropout.connect(dropout)
n.transform()
fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, 0.0)[0], z)
self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)
def test_RadialBasis(self):
x = core.Input(name='x', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
n = nodes.RadialBasis(name='radial', input_shape=x.shape,
output_shape=(None, 3))
n.weights.value = w.reshape(2, 3)
n.input.connect(x)
n.transform()
fx = util.compile(inputs=[x], outputs=n.outputs.values())
z = np.power(a.reshape(2, 2, 1) - w.reshape(1, 2, 3), 2.0).sum(axis=1)
np.testing.assert_equal(fx(a)[0], z)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class NodeTests(unittest.TestCase):
<|reserved_special_token_0|>
def tearDown(self):
pass
def test_Node(self):
pass
<|reserved_special_token_0|>
def test_Add(self):
x1 = core.Input(name='x1', shape=(2, 2))
x2 = core.Input(name='x2', shape=(2, 2))
n = nodes.Add(name='accumulate', num_inputs=2)
n.input_0.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_1.connect(x2)
self.assertIsNone(n.output.shape)
n.transform()
self.assertEqual(n.output.shape, (2, 2))
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
a = np.array([[3, -1], [3, 7]])
b = np.array([[1, 2], [3, 4]])
z = fx(a, b)[0]
np.testing.assert_equal(z, np.array([[4, 1], [6, 11]]))
@unittest.skip('Not fully implemented yet.')
def test_Bincount(self):
x1 = core.Input(name='x1', shape=(None,))
n = nodes.Bincount(name='counter', max_int=3)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=[n.counts])
a = np.array([3, 0, 3, 1])
np.testing.assert_equal(n.counts.value, np.array([0, 0, 0, 0]))
np.testing.assert_equal(fx(a)[0], np.array([1, 1, 0, 2]))
np.testing.assert_equal(fx(a)[0], np.array([2, 2, 0, 4]))
def test_Concatenate(self):
x1 = core.Input(name='x1', shape=(2, 2))
x2 = core.Input(name='x2', shape=(2, 2))
a = np.array([[3, -1], [3, 7]])
b = np.array([[1, 2], [3, 4]])
for axis in range(2):
n = nodes.Concatenate(name='concatenate', num_inputs=2, axis=axis)
n.input_0.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_1.connect(x2)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
z = fx(a, b)[0]
np.testing.assert_equal(z, np.concatenate([a, b], axis=axis))
def test_Stack(self):
x1 = core.Input(name='x1', shape=(2, 3))
x2 = core.Input(name='x2', shape=(2, 3))
a = np.arange(6).reshape(2, 3)
b = np.arange(6).reshape(2, 3) + 6
for axes in (None, (1, 2, 0), (2, 1, 0)):
n = nodes.Stack(name='stack', num_inputs=2, axes=axes)
n.input_1.connect(x2)
n.input_0.connect(x1)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
z = fx(a, b)[0]
expected = np.array([a, b])
if axes:
expected = np.transpose(expected, axes)
np.testing.assert_equal(z, expected)
def test_Dimshuffle(self):
x1 = core.Input(name='x1', shape=(2, 3))
a = np.zeros([2, 3])
axes = [('x', 0, 1), (0, 1, 'x'), (1, 'x', 0)]
shapes = [(1, 2, 3), (2, 3, 1), (3, 1, 2)]
for ax, shp in zip(axes, shapes):
n = nodes.Dimshuffle('dimshuffle', ax)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0].shape, shp)
def test_Slice(self):
x1 = core.Input(name='x1', shape=(2, 3))
a = np.arange(6).reshape(2, 3)
slices = [(None, 1), (0, None), (1, 0)]
ans = [a[:, 1], a[0, :], a[1, 0]]
for slc, ans in zip(slices, ans):
n = nodes.Slice('slice', slc)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], ans)
<|reserved_special_token_0|>
def test_Multiply(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [3, 7]])
for w, shp in zip([-1, a], [None, a.shape]):
n = nodes.Multiply(name='gain', weight_shape=shp)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], np.zeros_like(a))
n.weight.value = w
np.testing.assert_equal(fx(a)[0], w * a)
n = nodes.Multiply(name='gain', weight_shape=(1, 2), broadcast=[0])
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], np.zeros_like(a))
n.weight.value = a[0].reshape(1, -1)
np.testing.assert_equal(fx(a)[0], a * a[0].reshape(1, -1))
def test_Max(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 7, np.array([4, 7]), np.array([3, 7])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Max('max', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Min(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = -1, np.array([3, -1]), np.array([-1, 4])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Min('min', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Sum(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 13, np.array([7, 6]), np.array([2, 11])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Sum('sum', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Mean(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 13 / 4.0, np.array([7, 6]) / 2.0, np.array([2, 11]) / 2.0
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Mean('mean', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_NormalizeDim(self):
x1 = core.Input(name='x1', shape=(1, 2, 3))
a = np.array([[[3, 1, -1], [4, 0, 7]]], dtype=np.float32)
expected = [np.sign(a), a / np.sqrt(np.array([25, 1, 50])).reshape(
1, 1, 3), a / np.sqrt(np.array([11, 65])).reshape(1, 2, 1)]
for axis, ans in enumerate(expected):
n = nodes.NormalizeDim('l2norm', axis=axis, mode='l2')
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_almost_equal(fx(a)[0], ans)
def test_SelectIndex(self):
x1 = core.Input(name='x1', shape=(None, 2))
idx = core.Input(name='idx', shape=(None,), dtype='int32')
a = np.array([[3, -1], [4, 7]])
i = np.array([1, 0])
n = nodes.SelectIndex('select')
n.input.connect(x1)
n.index.connect(idx)
n.transform()
fx = util.compile(inputs=[x1, idx], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, i)[0], np.array([-1, 4]))
def test_SquaredEuclidean(self):
a1 = np.array([[3, -1], [4, 7]])
b1 = np.array([[1, -1], [4, 7]])
a2 = np.array([3, -1])
b2 = np.array([1, -1])
z1 = np.power(a1 - b1, 2.0).sum(axis=1)
z2 = np.power(a2 - b2, 2.0).sum()
for a, b, z in zip([a1, a2], [b1, b2], [z1, z2]):
x1 = core.Input(name='x1', shape=a.shape)
x2 = core.Input(name='x2', shape=b.shape)
n = nodes.SquaredEuclidean('sqeuclid')
n.input_a.connect(x1)
n.input_b.connect(x2)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, b)[0], z)
def test_Product(self):
a1 = np.array([[3, -1], [4, 7]])
b1 = np.array([[1, -1], [4, 7]])
a2 = np.array([3, -1])
b2 = np.array([1, -1])
for a, b in zip([a1, a2], [b1, b2]):
x1 = core.Input(name='x1', shape=a.shape)
x2 = core.Input(name='x2', shape=b.shape)
n = nodes.Product('product')
n.input_a.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_b.connect(x2)
self.assertTrue(n.is_ready())
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, b)[0], a * b)
def test_Affine_linear(self):
x1 = core.Input(name='x1', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
b = np.ones(3)
n = nodes.Affine(name='affine', input_shape=(None, 2), output_shape
=(None, 3), act_type='linear')
n.weights.value = w
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], np.dot(a, w) + b)
def test_Affine_relu(self):
x1 = core.Input(name='x1', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
b = np.ones(3)
n = nodes.Affine(name='affine', input_shape=(None, 2), output_shape
=(None, 3), act_type='relu')
n.weights.value = w
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], __relu__(np.dot(a, w) + b))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_Conv3D_relu(self):
x1 = core.Input(name='x1', shape=(None, 1, 2, 3))
a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)
w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)
b = np.arange(3)
z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(
w, b)]])
a = a.reshape(1, 1, 2, 3)
z = z.reshape(1, 3, 1, 3)
n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),
weight_shape=(3, 1, 2, 1), act_type='relu')
n.weights.value = w.reshape(3, 1, 2, 1)
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], __relu__(z))
def test_Conv3D_dropout(self):
x1 = core.Input(name='x1', shape=(None, 1, 2, 3))
dropout = core.Input(name='dropout', shape=None)
a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)
w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)
b = np.arange(3)
z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(
w, b)]])
a = a.reshape(1, 1, 2, 3)
z = z.reshape(1, 3, 1, 3)
n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),
weight_shape=(3, 1, 2, 1), act_type='linear')
n.enable_dropout()
n.weights.value = w.reshape(3, 1, 2, 1)
n.bias.value = b
n.input.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.dropout.connect(dropout)
n.transform()
fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, 0.0)[0], z)
self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)
def test_RadialBasis(self):
x = core.Input(name='x', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
n = nodes.RadialBasis(name='radial', input_shape=x.shape,
output_shape=(None, 3))
n.weights.value = w.reshape(2, 3)
n.input.connect(x)
n.transform()
fx = util.compile(inputs=[x], outputs=n.outputs.values())
z = np.power(a.reshape(2, 2, 1) - w.reshape(1, 2, 3), 2.0).sum(axis=1)
np.testing.assert_equal(fx(a)[0], z)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class NodeTests(unittest.TestCase):
<|reserved_special_token_0|>
def tearDown(self):
pass
def test_Node(self):
pass
def test_Constant(self):
n = nodes.Constant(name='test', shape=None)
n.data.value = 1.0
n.transform()
fx = util.compile(inputs=[], outputs=[n.output])
np.testing.assert_equal(np.array(fx()[0]), 1.0)
def test_Add(self):
x1 = core.Input(name='x1', shape=(2, 2))
x2 = core.Input(name='x2', shape=(2, 2))
n = nodes.Add(name='accumulate', num_inputs=2)
n.input_0.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_1.connect(x2)
self.assertIsNone(n.output.shape)
n.transform()
self.assertEqual(n.output.shape, (2, 2))
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
a = np.array([[3, -1], [3, 7]])
b = np.array([[1, 2], [3, 4]])
z = fx(a, b)[0]
np.testing.assert_equal(z, np.array([[4, 1], [6, 11]]))
@unittest.skip('Not fully implemented yet.')
def test_Bincount(self):
x1 = core.Input(name='x1', shape=(None,))
n = nodes.Bincount(name='counter', max_int=3)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=[n.counts])
a = np.array([3, 0, 3, 1])
np.testing.assert_equal(n.counts.value, np.array([0, 0, 0, 0]))
np.testing.assert_equal(fx(a)[0], np.array([1, 1, 0, 2]))
np.testing.assert_equal(fx(a)[0], np.array([2, 2, 0, 4]))
def test_Concatenate(self):
x1 = core.Input(name='x1', shape=(2, 2))
x2 = core.Input(name='x2', shape=(2, 2))
a = np.array([[3, -1], [3, 7]])
b = np.array([[1, 2], [3, 4]])
for axis in range(2):
n = nodes.Concatenate(name='concatenate', num_inputs=2, axis=axis)
n.input_0.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_1.connect(x2)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
z = fx(a, b)[0]
np.testing.assert_equal(z, np.concatenate([a, b], axis=axis))
def test_Stack(self):
x1 = core.Input(name='x1', shape=(2, 3))
x2 = core.Input(name='x2', shape=(2, 3))
a = np.arange(6).reshape(2, 3)
b = np.arange(6).reshape(2, 3) + 6
for axes in (None, (1, 2, 0), (2, 1, 0)):
n = nodes.Stack(name='stack', num_inputs=2, axes=axes)
n.input_1.connect(x2)
n.input_0.connect(x1)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=[n.output])
z = fx(a, b)[0]
expected = np.array([a, b])
if axes:
expected = np.transpose(expected, axes)
np.testing.assert_equal(z, expected)
def test_Dimshuffle(self):
x1 = core.Input(name='x1', shape=(2, 3))
a = np.zeros([2, 3])
axes = [('x', 0, 1), (0, 1, 'x'), (1, 'x', 0)]
shapes = [(1, 2, 3), (2, 3, 1), (3, 1, 2)]
for ax, shp in zip(axes, shapes):
n = nodes.Dimshuffle('dimshuffle', ax)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0].shape, shp)
def test_Slice(self):
x1 = core.Input(name='x1', shape=(2, 3))
a = np.arange(6).reshape(2, 3)
slices = [(None, 1), (0, None), (1, 0)]
ans = [a[:, 1], a[0, :], a[1, 0]]
for slc, ans in zip(slices, ans):
n = nodes.Slice('slice', slc)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], ans)
<|reserved_special_token_0|>
def test_Multiply(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [3, 7]])
for w, shp in zip([-1, a], [None, a.shape]):
n = nodes.Multiply(name='gain', weight_shape=shp)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], np.zeros_like(a))
n.weight.value = w
np.testing.assert_equal(fx(a)[0], w * a)
n = nodes.Multiply(name='gain', weight_shape=(1, 2), broadcast=[0])
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], np.zeros_like(a))
n.weight.value = a[0].reshape(1, -1)
np.testing.assert_equal(fx(a)[0], a * a[0].reshape(1, -1))
def test_Max(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 7, np.array([4, 7]), np.array([3, 7])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Max('max', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Min(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = -1, np.array([3, -1]), np.array([-1, 4])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Min('min', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Sum(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 13, np.array([7, 6]), np.array([2, 11])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Sum('sum', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Mean(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 13 / 4.0, np.array([7, 6]) / 2.0, np.array([2, 11]) / 2.0
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Mean('mean', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_NormalizeDim(self):
x1 = core.Input(name='x1', shape=(1, 2, 3))
a = np.array([[[3, 1, -1], [4, 0, 7]]], dtype=np.float32)
expected = [np.sign(a), a / np.sqrt(np.array([25, 1, 50])).reshape(
1, 1, 3), a / np.sqrt(np.array([11, 65])).reshape(1, 2, 1)]
for axis, ans in enumerate(expected):
n = nodes.NormalizeDim('l2norm', axis=axis, mode='l2')
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.
values())
np.testing.assert_almost_equal(fx(a)[0], ans)
def test_SelectIndex(self):
x1 = core.Input(name='x1', shape=(None, 2))
idx = core.Input(name='idx', shape=(None,), dtype='int32')
a = np.array([[3, -1], [4, 7]])
i = np.array([1, 0])
n = nodes.SelectIndex('select')
n.input.connect(x1)
n.index.connect(idx)
n.transform()
fx = util.compile(inputs=[x1, idx], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, i)[0], np.array([-1, 4]))
def test_SquaredEuclidean(self):
a1 = np.array([[3, -1], [4, 7]])
b1 = np.array([[1, -1], [4, 7]])
a2 = np.array([3, -1])
b2 = np.array([1, -1])
z1 = np.power(a1 - b1, 2.0).sum(axis=1)
z2 = np.power(a2 - b2, 2.0).sum()
for a, b, z in zip([a1, a2], [b1, b2], [z1, z2]):
x1 = core.Input(name='x1', shape=a.shape)
x2 = core.Input(name='x2', shape=b.shape)
n = nodes.SquaredEuclidean('sqeuclid')
n.input_a.connect(x1)
n.input_b.connect(x2)
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, b)[0], z)
def test_Product(self):
a1 = np.array([[3, -1], [4, 7]])
b1 = np.array([[1, -1], [4, 7]])
a2 = np.array([3, -1])
b2 = np.array([1, -1])
for a, b in zip([a1, a2], [b1, b2]):
x1 = core.Input(name='x1', shape=a.shape)
x2 = core.Input(name='x2', shape=b.shape)
n = nodes.Product('product')
n.input_a.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_b.connect(x2)
self.assertTrue(n.is_ready())
n.transform()
fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, b)[0], a * b)
def test_Affine_linear(self):
x1 = core.Input(name='x1', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
b = np.ones(3)
n = nodes.Affine(name='affine', input_shape=(None, 2), output_shape
=(None, 3), act_type='linear')
n.weights.value = w
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], np.dot(a, w) + b)
def test_Affine_relu(self):
x1 = core.Input(name='x1', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
b = np.ones(3)
n = nodes.Affine(name='affine', input_shape=(None, 2), output_shape
=(None, 3), act_type='relu')
n.weights.value = w
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], __relu__(np.dot(a, w) + b))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_Conv3D_relu(self):
x1 = core.Input(name='x1', shape=(None, 1, 2, 3))
a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)
w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)
b = np.arange(3)
z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(
w, b)]])
a = a.reshape(1, 1, 2, 3)
z = z.reshape(1, 3, 1, 3)
n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),
weight_shape=(3, 1, 2, 1), act_type='relu')
n.weights.value = w.reshape(3, 1, 2, 1)
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], __relu__(z))
def test_Conv3D_dropout(self):
x1 = core.Input(name='x1', shape=(None, 1, 2, 3))
dropout = core.Input(name='dropout', shape=None)
a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)
w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)
b = np.arange(3)
z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(
w, b)]])
a = a.reshape(1, 1, 2, 3)
z = z.reshape(1, 3, 1, 3)
n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),
weight_shape=(3, 1, 2, 1), act_type='linear')
n.enable_dropout()
n.weights.value = w.reshape(3, 1, 2, 1)
n.bias.value = b
n.input.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.dropout.connect(dropout)
n.transform()
fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, 0.0)[0], z)
self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)
def test_RadialBasis(self):
x = core.Input(name='x', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
n = nodes.RadialBasis(name='radial', input_shape=x.shape,
output_shape=(None, 3))
n.weights.value = w.reshape(2, 3)
n.input.connect(x)
n.transform()
fx = util.compile(inputs=[x], outputs=n.outputs.values())
z = np.power(a.reshape(2, 2, 1) - w.reshape(1, 2, 3), 2.0).sum(axis=1)
np.testing.assert_equal(fx(a)[0], z)
def test_SliceGT(self):
x = core.Input(name='x', shape=(None,))
n = nodes.SliceGT(name='slice-greater', value=0)
n.input.connect(x)
n.transform()
fx = util.compile(inputs=[x], outputs=n.outputs.values())
a = np.array([1, -2, 0])
np.testing.assert_equal(fx(a)[0], np.array([1]))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
"""Tests for Node objects."""
import numpy as np
import unittest
import optimus.core as core
import optimus.nodes as nodes
import optimus.util as util
def __relu__(x):
"Numpy Rectified Linear Unit."
return 0.5 * (np.abs(x) + x)
class NodeTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_Node(self):
pass
def test_Constant(self):
n = nodes.Constant(name='test', shape=None)
n.data.value = 1.0
n.transform()
fx = util.compile(inputs=[], outputs=[n.output])
np.testing.assert_equal(np.array(fx()[0]), 1.0)
def test_Add(self):
x1 = core.Input(name='x1', shape=(2, 2))
x2 = core.Input(name='x2', shape=(2, 2))
n = nodes.Add(name='accumulate', num_inputs=2)
n.input_0.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_1.connect(x2)
self.assertIsNone(n.output.shape)
n.transform()
self.assertEqual(n.output.shape, (2, 2))
fx = util.compile(inputs=[x1, x2],
outputs=[n.output])
a = np.array([[3, -1], [3, 7]])
b = np.array([[1, 2], [3, 4]])
z = fx(a, b)[0]
np.testing.assert_equal(z, np.array([[4, 1], [6, 11]]))
@unittest.skip("Not fully implemented yet.")
def test_Bincount(self):
x1 = core.Input(name='x1', shape=(None,))
n = nodes.Bincount(name='counter', max_int=3)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=[n.counts])
a = np.array([3, 0, 3, 1])
np.testing.assert_equal(n.counts.value, np.array([0, 0, 0, 0]))
np.testing.assert_equal(fx(a)[0], np.array([1, 1, 0, 2]))
np.testing.assert_equal(fx(a)[0], np.array([2, 2, 0, 4]))
def test_Concatenate(self):
x1 = core.Input(name='x1', shape=(2, 2))
x2 = core.Input(name='x2', shape=(2, 2))
a = np.array([[3, -1], [3, 7]])
b = np.array([[1, 2], [3, 4]])
for axis in range(2):
n = nodes.Concatenate(name='concatenate', num_inputs=2, axis=axis)
n.input_0.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_1.connect(x2)
n.transform()
fx = util.compile(inputs=[x1, x2],
outputs=[n.output])
z = fx(a, b)[0]
np.testing.assert_equal(z, np.concatenate([a, b], axis=axis))
def test_Stack(self):
x1 = core.Input(name='x1', shape=(2, 3))
x2 = core.Input(name='x2', shape=(2, 3))
a = np.arange(6).reshape(2, 3)
b = np.arange(6).reshape(2, 3) + 6
for axes in None, (1, 2, 0), (2, 1, 0):
n = nodes.Stack(name='stack', num_inputs=2, axes=axes)
n.input_1.connect(x2)
n.input_0.connect(x1)
n.transform()
fx = util.compile(inputs=[x1, x2],
outputs=[n.output])
z = fx(a, b)[0]
expected = np.array([a, b])
if axes:
expected = np.transpose(expected, axes)
np.testing.assert_equal(z, expected)
def test_Dimshuffle(self):
x1 = core.Input(name='x1', shape=(2, 3))
a = np.zeros([2, 3])
axes = [('x', 0, 1), (0, 1, 'x'), (1, 'x', 0)]
shapes = [(1, 2, 3), (2, 3, 1), (3, 1, 2)]
for ax, shp in zip(axes, shapes):
n = nodes.Dimshuffle('dimshuffle', ax)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(),
outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0].shape, shp)
def test_Slice(self):
x1 = core.Input(name='x1', shape=(2, 3))
a = np.arange(6).reshape(2, 3)
slices = [(None, 1), (0, None), (1, 0)]
ans = [a[:, 1], a[0, :], a[1, 0]]
for slc, ans in zip(slices, ans):
n = nodes.Slice('slice', slc)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(),
outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], ans)
def test_Log(self):
x1 = core.Input(name='x1', shape=(2, 2))
log = nodes.Log('log')
log.input.connect(x1)
log.transform()
fx = util.compile(inputs=log.inputs.values(),
outputs=log.outputs.values())
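        # Use strictly positive entries so np.log stays finite.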
a = np.array([[3, 1], [4, 7]], dtype=np.float32)
z = fx(a)[0]
np.testing.assert_almost_equal(z, np.log(a))
def test_Multiply(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [3, 7]])
for w, shp in zip([-1, a], [None, a.shape]):
n = nodes.Multiply(name='gain', weight_shape=shp)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(),
outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], np.zeros_like(a))
n.weight.value = w
np.testing.assert_equal(fx(a)[0], w*a)
n = nodes.Multiply(name='gain', weight_shape=(1, 2), broadcast=[0])
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(),
outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], np.zeros_like(a))
n.weight.value = a[0].reshape(1, -1)
np.testing.assert_equal(fx(a)[0], a*a[0].reshape(1, -1))
def test_Max(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 7, np.array([4, 7]), np.array([3, 7])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Max('max', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(),
outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Min(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = -1, np.array([3, -1]), np.array([-1, 4])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Min('min', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(),
outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Sum(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 13, np.array([7, 6]), np.array([2, 11])
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Sum('sum', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(),
outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_Mean(self):
x1 = core.Input(name='x1', shape=(2, 2))
a = np.array([[3, -1], [4, 7]])
res = 13 / 4.0, np.array([7, 6]) / 2.0, np.array([2, 11]) / 2.0
for idx, axis in enumerate([None, 0, 1]):
n = nodes.Mean('mean', axis=axis)
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(),
outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], res[idx])
def test_NormalizeDim(self):
x1 = core.Input(name='x1', shape=(1, 2, 3))
a = np.array([[[3, 1, -1], [4, 0, 7]]], dtype=np.float32)
expected = [np.sign(a),
a / np.sqrt(np.array([25, 1, 50])).reshape(1, 1, 3),
a / np.sqrt(np.array([11, 65])).reshape(1, 2, 1)]
for axis, ans in enumerate(expected):
n = nodes.NormalizeDim('l2norm', axis=axis, mode='l2')
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=n.inputs.values(),
outputs=n.outputs.values())
np.testing.assert_almost_equal(fx(a)[0], ans)
def test_SelectIndex(self):
x1 = core.Input(name='x1', shape=(None, 2))
idx = core.Input(name='idx', shape=(None,), dtype='int32')
a = np.array([[3, -1], [4, 7]])
i = np.array([1, 0])
n = nodes.SelectIndex('select')
n.input.connect(x1)
n.index.connect(idx)
n.transform()
fx = util.compile(inputs=[x1, idx],
outputs=n.outputs.values())
np.testing.assert_equal(fx(a, i)[0], np.array([-1, 4]))
def test_SquaredEuclidean(self):
a1 = np.array([[3, -1], [4, 7]])
b1 = np.array([[1, -1], [4, 7]])
a2 = np.array([3, -1])
b2 = np.array([1, -1])
z1 = np.power(a1 - b1, 2.0).sum(axis=1)
z2 = np.power(a2 - b2, 2.0).sum()
for a, b, z in zip([a1, a2], [b1, b2], [z1, z2]):
x1 = core.Input(name='x1', shape=a.shape)
x2 = core.Input(name='x2', shape=b.shape)
n = nodes.SquaredEuclidean('sqeuclid')
n.input_a.connect(x1)
n.input_b.connect(x2)
n.transform()
fx = util.compile(inputs=[x1, x2],
outputs=n.outputs.values())
np.testing.assert_equal(fx(a, b)[0], z)
def test_Product(self):
a1 = np.array([[3, -1], [4, 7]])
b1 = np.array([[1, -1], [4, 7]])
a2 = np.array([3, -1])
b2 = np.array([1, -1])
for a, b in zip([a1, a2], [b1, b2]):
x1 = core.Input(name='x1', shape=a.shape)
x2 = core.Input(name='x2', shape=b.shape)
n = nodes.Product('product')
n.input_a.connect(x1)
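            # transform() must fail while input_b is still unconnected.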
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.input_b.connect(x2)
self.assertTrue(n.is_ready())
n.transform()
fx = util.compile(inputs=[x1, x2],
outputs=n.outputs.values())
np.testing.assert_equal(fx(a, b)[0], a*b)
def test_Affine_linear(self):
x1 = core.Input(name='x1', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
b = np.ones(3)
n = nodes.Affine(
name='affine',
input_shape=(None, 2),
output_shape=(None, 3),
act_type='linear')
n.weights.value = w
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], np.dot(a, w) + b)
def test_Affine_relu(self):
x1 = core.Input(name='x1', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
b = np.ones(3)
n = nodes.Affine(
name='affine',
input_shape=(None, 2),
output_shape=(None, 3),
act_type='relu')
n.weights.value = w
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], __relu__(np.dot(a, w) + b))
def test_Affine_dropout(self):
x1 = core.Input(name='x1', shape=(None, 2))
dropout = core.Input(name='dropout', shape=None)
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
b = np.ones(3)
n = nodes.Affine(
name='affine',
input_shape=(None, 2),
output_shape=(None, 3),
act_type='linear')
n.weights.value = w
n.bias.value = b
n.enable_dropout()
n.input.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.dropout.connect(dropout)
n.transform()
fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, 0.0)[0], np.dot(a, w) + b)
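        # Stochastic check: with the dropout input at 0.9 (interpreted here
        # as the drop rate), at least one activation should be zeroed.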
self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)
def test_Affine_share_params(self):
x = core.Input(name='x1', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
b = np.ones(3)
n1 = nodes.Affine(
name='affine',
input_shape=(None, 2),
output_shape=(None, 3),
act_type='linear')
n2 = nodes.Affine(
name='affine_copy',
input_shape=(None, 2),
output_shape=(None, 3),
act_type='linear')
n2.share_params(n1)
n1.weights.value = w
n1.bias.value = b
np.testing.assert_equal(n1.weights.value, n2.weights.value)
np.testing.assert_equal(n1.bias.value, n2.bias.value)
n2.input.connect(x)
n2.transform()
fx = util.compile(inputs=[x], outputs=n2.outputs.values())
np.testing.assert_equal(fx(a)[0], np.dot(a, w) + b)
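        # Because the parameters are shared, mutating n1's weights is
        # immediately visible through the function compiled from n2.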
n1.weights.value *= 2
np.testing.assert_equal(fx(a)[0], np.dot(a, 2*w) + b)
def test_Conv3D_linear(self):
x1 = core.Input(name='x1', shape=(None, 1, 2, 3))
a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)
w = np.array([[[1], [-2]],
[[-3], [4]],
[[5], [-6]]]).reshape(3, 2, 1)
b = np.arange(3)
# Note that convolutions flip the kernels
z = np.array([[(a*wi[::-1]).sum(axis=0) + bi
for wi, bi in zip(w, b)]])
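        # Worked example for the first filter: a is [[3, -1, 4], [7, 2, -6]]
        # after the reshape and w[0] is [[1], [-2]]; the flipped kernel
        # [[-2], [1]] gives column sums [-6 + 7, 2 + 2, -8 - 6] + b[0]
        # = [1, 4, -14].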
n = nodes.Conv3D(
name='conv3d',
input_shape=(None, 1, 2, 3),
weight_shape=(3, 1, 2, 1),
act_type='linear')
n.weights.value = w.reshape(3, 1, 2, 1)
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a.reshape(1, 1, 2, 3))[0],
z.reshape(1, 3, 1, 3))
def test_Conv3D_relu(self):
x1 = core.Input(name='x1', shape=(None, 1, 2, 3))
a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)
w = np.array([[[1], [-2]],
[[-3], [4]],
[[5], [-6]]]).reshape(3, 2, 1)
b = np.arange(3)
# Note that convolutions flip the kernels
z = np.array([[(a*wi[::-1]).sum(axis=0) + bi
for wi, bi in zip(w, b)]])
        # Reshape for convenience
a = a.reshape(1, 1, 2, 3)
z = z.reshape(1, 3, 1, 3)
n = nodes.Conv3D(
name='conv3d',
input_shape=(None, 1, 2, 3),
weight_shape=(3, 1, 2, 1),
act_type='relu')
n.weights.value = w.reshape(3, 1, 2, 1)
n.bias.value = b
n.input.connect(x1)
n.transform()
fx = util.compile(inputs=[x1], outputs=n.outputs.values())
np.testing.assert_equal(fx(a)[0], __relu__(z))
def test_Conv3D_dropout(self):
x1 = core.Input(name='x1', shape=(None, 1, 2, 3))
dropout = core.Input(name='dropout', shape=None)
a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)
w = np.array([[[1], [-2]],
[[-3], [4]],
[[5], [-6]]]).reshape(3, 2, 1)
b = np.arange(3)
# Note that convolutions flip the kernels
z = np.array([[(a*wi[::-1]).sum(axis=0) + bi
for wi, bi in zip(w, b)]])
        # Reshape for convenience
a = a.reshape(1, 1, 2, 3)
z = z.reshape(1, 3, 1, 3)
n = nodes.Conv3D(
name='conv3d',
input_shape=(None, 1, 2, 3),
weight_shape=(3, 1, 2, 1),
act_type='linear')
n.enable_dropout()
n.weights.value = w.reshape(3, 1, 2, 1)
n.bias.value = b
n.input.connect(x1)
with self.assertRaises(nodes.UnconnectedNodeError):
n.transform()
n.dropout.connect(dropout)
n.transform()
fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())
np.testing.assert_equal(fx(a, 0.0)[0], z)
self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)
def test_RadialBasis(self):
x = core.Input(name='x', shape=(None, 2))
a = np.array([[3, -1], [4, 7]])
w = np.array([[1, -1], [2, -2], [3, -3]]).T
n = nodes.RadialBasis(
name='radial',
input_shape=x.shape,
output_shape=(None, 3))
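        # Each of the 3 output units is presumably the squared Euclidean
        # distance from the input to one center (a column of the 2x3
        # weights), matching the expected z computed below.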
n.weights.value = w.reshape(2, 3)
n.input.connect(x)
n.transform()
fx = util.compile(inputs=[x], outputs=n.outputs.values())
z = np.power(a.reshape(2, 2, 1) - w.reshape(1, 2, 3),
2.0).sum(axis=1)
np.testing.assert_equal(fx(a)[0], z)
def test_SliceGT(self):
x = core.Input(name='x', shape=(None,))
n = nodes.SliceGT(name='slice-greater', value=0)
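        # Keeps only the entries strictly greater than the threshold (0),
        # so [1, -2, 0] reduces to [1].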
n.input.connect(x)
n.transform()
fx = util.compile(inputs=[x], outputs=n.outputs.values())
a = np.array([1, -2, 0])
np.testing.assert_equal(fx(a)[0], np.array([1]))
if __name__ == "__main__":
unittest.main()
|
flexible
|
{
"blob_id": "8e74bd0c051b672bf22c2c8dfb03760805b105c5",
"index": 8799,
"step-1": "<mask token>\n\n\nclass NodeTests(unittest.TestCase):\n <mask token>\n\n def tearDown(self):\n pass\n <mask token>\n <mask token>\n\n def test_Add(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n x2 = core.Input(name='x2', shape=(2, 2))\n n = nodes.Add(name='accumulate', num_inputs=2)\n n.input_0.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_1.connect(x2)\n self.assertIsNone(n.output.shape)\n n.transform()\n self.assertEqual(n.output.shape, (2, 2))\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n a = np.array([[3, -1], [3, 7]])\n b = np.array([[1, 2], [3, 4]])\n z = fx(a, b)[0]\n np.testing.assert_equal(z, np.array([[4, 1], [6, 11]]))\n\n @unittest.skip('Not fully implemented yet.')\n def test_Bincount(self):\n x1 = core.Input(name='x1', shape=(None,))\n n = nodes.Bincount(name='counter', max_int=3)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=[n.counts])\n a = np.array([3, 0, 3, 1])\n np.testing.assert_equal(n.counts.value, np.array([0, 0, 0, 0]))\n np.testing.assert_equal(fx(a)[0], np.array([1, 1, 0, 2]))\n np.testing.assert_equal(fx(a)[0], np.array([2, 2, 0, 4]))\n\n def test_Concatenate(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n x2 = core.Input(name='x2', shape=(2, 2))\n a = np.array([[3, -1], [3, 7]])\n b = np.array([[1, 2], [3, 4]])\n for axis in range(2):\n n = nodes.Concatenate(name='concatenate', num_inputs=2, axis=axis)\n n.input_0.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_1.connect(x2)\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n z = fx(a, b)[0]\n np.testing.assert_equal(z, np.concatenate([a, b], axis=axis))\n\n def test_Stack(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n x2 = core.Input(name='x2', shape=(2, 3))\n a = np.arange(6).reshape(2, 3)\n b = np.arange(6).reshape(2, 3) + 6\n for axes in (None, (1, 2, 0), (2, 1, 0)):\n n = nodes.Stack(name='stack', num_inputs=2, axes=axes)\n n.input_1.connect(x2)\n n.input_0.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n z = fx(a, b)[0]\n expected = np.array([a, b])\n if axes:\n expected = np.transpose(expected, axes)\n np.testing.assert_equal(z, expected)\n\n def test_Dimshuffle(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n a = np.zeros([2, 3])\n axes = [('x', 0, 1), (0, 1, 'x'), (1, 'x', 0)]\n shapes = [(1, 2, 3), (2, 3, 1), (3, 1, 2)]\n for ax, shp in zip(axes, shapes):\n n = nodes.Dimshuffle('dimshuffle', ax)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0].shape, shp)\n\n def test_Slice(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n a = np.arange(6).reshape(2, 3)\n slices = [(None, 1), (0, None), (1, 0)]\n ans = [a[:, 1], a[0, :], a[1, 0]]\n for slc, ans in zip(slices, ans):\n n = nodes.Slice('slice', slc)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], ans)\n <mask token>\n\n def test_Multiply(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [3, 7]])\n for w, shp in zip([-1, a], [None, a.shape]):\n n = nodes.Multiply(name='gain', weight_shape=shp)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], np.zeros_like(a))\n n.weight.value = w\n 
np.testing.assert_equal(fx(a)[0], w * a)\n n = nodes.Multiply(name='gain', weight_shape=(1, 2), broadcast=[0])\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], np.zeros_like(a))\n n.weight.value = a[0].reshape(1, -1)\n np.testing.assert_equal(fx(a)[0], a * a[0].reshape(1, -1))\n <mask token>\n\n def test_Min(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = -1, np.array([3, -1]), np.array([-1, 4])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Min('min', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Sum(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 13, np.array([7, 6]), np.array([2, 11])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Sum('sum', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Mean(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 13 / 4.0, np.array([7, 6]) / 2.0, np.array([2, 11]) / 2.0\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Mean('mean', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_NormalizeDim(self):\n x1 = core.Input(name='x1', shape=(1, 2, 3))\n a = np.array([[[3, 1, -1], [4, 0, 7]]], dtype=np.float32)\n expected = [np.sign(a), a / np.sqrt(np.array([25, 1, 50])).reshape(\n 1, 1, 3), a / np.sqrt(np.array([11, 65])).reshape(1, 2, 1)]\n for axis, ans in enumerate(expected):\n n = nodes.NormalizeDim('l2norm', axis=axis, mode='l2')\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_almost_equal(fx(a)[0], ans)\n <mask token>\n\n def test_SquaredEuclidean(self):\n a1 = np.array([[3, -1], [4, 7]])\n b1 = np.array([[1, -1], [4, 7]])\n a2 = np.array([3, -1])\n b2 = np.array([1, -1])\n z1 = np.power(a1 - b1, 2.0).sum(axis=1)\n z2 = np.power(a2 - b2, 2.0).sum()\n for a, b, z in zip([a1, a2], [b1, b2], [z1, z2]):\n x1 = core.Input(name='x1', shape=a.shape)\n x2 = core.Input(name='x2', shape=b.shape)\n n = nodes.SquaredEuclidean('sqeuclid')\n n.input_a.connect(x1)\n n.input_b.connect(x2)\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, b)[0], z)\n\n def test_Product(self):\n a1 = np.array([[3, -1], [4, 7]])\n b1 = np.array([[1, -1], [4, 7]])\n a2 = np.array([3, -1])\n b2 = np.array([1, -1])\n for a, b in zip([a1, a2], [b1, b2]):\n x1 = core.Input(name='x1', shape=a.shape)\n x2 = core.Input(name='x2', shape=b.shape)\n n = nodes.Product('product')\n n.input_a.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_b.connect(x2)\n self.assertTrue(n.is_ready())\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, b)[0], a * b)\n <mask token>\n\n def test_Affine_relu(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n b = np.ones(3)\n n = nodes.Affine(name='affine', input_shape=(None, 2), 
output_shape\n =(None, 3), act_type='relu')\n n.weights.value = w\n n.bias.value = b\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], __relu__(np.dot(a, w) + b))\n <mask token>\n <mask token>\n <mask token>\n\n def test_Conv3D_relu(self):\n x1 = core.Input(name='x1', shape=(None, 1, 2, 3))\n a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)\n w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)\n b = np.arange(3)\n z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(\n w, b)]])\n a = a.reshape(1, 1, 2, 3)\n z = z.reshape(1, 3, 1, 3)\n n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),\n weight_shape=(3, 1, 2, 1), act_type='relu')\n n.weights.value = w.reshape(3, 1, 2, 1)\n n.bias.value = b\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], __relu__(z))\n\n def test_Conv3D_dropout(self):\n x1 = core.Input(name='x1', shape=(None, 1, 2, 3))\n dropout = core.Input(name='dropout', shape=None)\n a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)\n w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)\n b = np.arange(3)\n z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(\n w, b)]])\n a = a.reshape(1, 1, 2, 3)\n z = z.reshape(1, 3, 1, 3)\n n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),\n weight_shape=(3, 1, 2, 1), act_type='linear')\n n.enable_dropout()\n n.weights.value = w.reshape(3, 1, 2, 1)\n n.bias.value = b\n n.input.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.dropout.connect(dropout)\n n.transform()\n fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, 0.0)[0], z)\n self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)\n\n def test_RadialBasis(self):\n x = core.Input(name='x', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n n = nodes.RadialBasis(name='radial', input_shape=x.shape,\n output_shape=(None, 3))\n n.weights.value = w.reshape(2, 3)\n n.input.connect(x)\n n.transform()\n fx = util.compile(inputs=[x], outputs=n.outputs.values())\n z = np.power(a.reshape(2, 2, 1) - w.reshape(1, 2, 3), 2.0).sum(axis=1)\n np.testing.assert_equal(fx(a)[0], z)\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass NodeTests(unittest.TestCase):\n <mask token>\n\n def tearDown(self):\n pass\n\n def test_Node(self):\n pass\n <mask token>\n\n def test_Add(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n x2 = core.Input(name='x2', shape=(2, 2))\n n = nodes.Add(name='accumulate', num_inputs=2)\n n.input_0.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_1.connect(x2)\n self.assertIsNone(n.output.shape)\n n.transform()\n self.assertEqual(n.output.shape, (2, 2))\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n a = np.array([[3, -1], [3, 7]])\n b = np.array([[1, 2], [3, 4]])\n z = fx(a, b)[0]\n np.testing.assert_equal(z, np.array([[4, 1], [6, 11]]))\n\n @unittest.skip('Not fully implemented yet.')\n def test_Bincount(self):\n x1 = core.Input(name='x1', shape=(None,))\n n = nodes.Bincount(name='counter', max_int=3)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=[n.counts])\n a = np.array([3, 0, 3, 1])\n np.testing.assert_equal(n.counts.value, np.array([0, 0, 0, 0]))\n np.testing.assert_equal(fx(a)[0], np.array([1, 1, 0, 2]))\n np.testing.assert_equal(fx(a)[0], np.array([2, 2, 0, 4]))\n\n def test_Concatenate(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n x2 = core.Input(name='x2', shape=(2, 2))\n a = np.array([[3, -1], [3, 7]])\n b = np.array([[1, 2], [3, 4]])\n for axis in range(2):\n n = nodes.Concatenate(name='concatenate', num_inputs=2, axis=axis)\n n.input_0.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_1.connect(x2)\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n z = fx(a, b)[0]\n np.testing.assert_equal(z, np.concatenate([a, b], axis=axis))\n\n def test_Stack(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n x2 = core.Input(name='x2', shape=(2, 3))\n a = np.arange(6).reshape(2, 3)\n b = np.arange(6).reshape(2, 3) + 6\n for axes in (None, (1, 2, 0), (2, 1, 0)):\n n = nodes.Stack(name='stack', num_inputs=2, axes=axes)\n n.input_1.connect(x2)\n n.input_0.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n z = fx(a, b)[0]\n expected = np.array([a, b])\n if axes:\n expected = np.transpose(expected, axes)\n np.testing.assert_equal(z, expected)\n\n def test_Dimshuffle(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n a = np.zeros([2, 3])\n axes = [('x', 0, 1), (0, 1, 'x'), (1, 'x', 0)]\n shapes = [(1, 2, 3), (2, 3, 1), (3, 1, 2)]\n for ax, shp in zip(axes, shapes):\n n = nodes.Dimshuffle('dimshuffle', ax)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0].shape, shp)\n\n def test_Slice(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n a = np.arange(6).reshape(2, 3)\n slices = [(None, 1), (0, None), (1, 0)]\n ans = [a[:, 1], a[0, :], a[1, 0]]\n for slc, ans in zip(slices, ans):\n n = nodes.Slice('slice', slc)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], ans)\n <mask token>\n\n def test_Multiply(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [3, 7]])\n for w, shp in zip([-1, a], [None, a.shape]):\n n = nodes.Multiply(name='gain', weight_shape=shp)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], np.zeros_like(a))\n n.weight.value = w\n 
np.testing.assert_equal(fx(a)[0], w * a)\n n = nodes.Multiply(name='gain', weight_shape=(1, 2), broadcast=[0])\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], np.zeros_like(a))\n n.weight.value = a[0].reshape(1, -1)\n np.testing.assert_equal(fx(a)[0], a * a[0].reshape(1, -1))\n <mask token>\n\n def test_Min(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = -1, np.array([3, -1]), np.array([-1, 4])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Min('min', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Sum(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 13, np.array([7, 6]), np.array([2, 11])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Sum('sum', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Mean(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 13 / 4.0, np.array([7, 6]) / 2.0, np.array([2, 11]) / 2.0\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Mean('mean', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_NormalizeDim(self):\n x1 = core.Input(name='x1', shape=(1, 2, 3))\n a = np.array([[[3, 1, -1], [4, 0, 7]]], dtype=np.float32)\n expected = [np.sign(a), a / np.sqrt(np.array([25, 1, 50])).reshape(\n 1, 1, 3), a / np.sqrt(np.array([11, 65])).reshape(1, 2, 1)]\n for axis, ans in enumerate(expected):\n n = nodes.NormalizeDim('l2norm', axis=axis, mode='l2')\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_almost_equal(fx(a)[0], ans)\n <mask token>\n\n def test_SquaredEuclidean(self):\n a1 = np.array([[3, -1], [4, 7]])\n b1 = np.array([[1, -1], [4, 7]])\n a2 = np.array([3, -1])\n b2 = np.array([1, -1])\n z1 = np.power(a1 - b1, 2.0).sum(axis=1)\n z2 = np.power(a2 - b2, 2.0).sum()\n for a, b, z in zip([a1, a2], [b1, b2], [z1, z2]):\n x1 = core.Input(name='x1', shape=a.shape)\n x2 = core.Input(name='x2', shape=b.shape)\n n = nodes.SquaredEuclidean('sqeuclid')\n n.input_a.connect(x1)\n n.input_b.connect(x2)\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, b)[0], z)\n\n def test_Product(self):\n a1 = np.array([[3, -1], [4, 7]])\n b1 = np.array([[1, -1], [4, 7]])\n a2 = np.array([3, -1])\n b2 = np.array([1, -1])\n for a, b in zip([a1, a2], [b1, b2]):\n x1 = core.Input(name='x1', shape=a.shape)\n x2 = core.Input(name='x2', shape=b.shape)\n n = nodes.Product('product')\n n.input_a.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_b.connect(x2)\n self.assertTrue(n.is_ready())\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, b)[0], a * b)\n <mask token>\n\n def test_Affine_relu(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n b = np.ones(3)\n n = nodes.Affine(name='affine', input_shape=(None, 2), 
output_shape\n =(None, 3), act_type='relu')\n n.weights.value = w\n n.bias.value = b\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], __relu__(np.dot(a, w) + b))\n <mask token>\n <mask token>\n <mask token>\n\n def test_Conv3D_relu(self):\n x1 = core.Input(name='x1', shape=(None, 1, 2, 3))\n a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)\n w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)\n b = np.arange(3)\n z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(\n w, b)]])\n a = a.reshape(1, 1, 2, 3)\n z = z.reshape(1, 3, 1, 3)\n n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),\n weight_shape=(3, 1, 2, 1), act_type='relu')\n n.weights.value = w.reshape(3, 1, 2, 1)\n n.bias.value = b\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], __relu__(z))\n\n def test_Conv3D_dropout(self):\n x1 = core.Input(name='x1', shape=(None, 1, 2, 3))\n dropout = core.Input(name='dropout', shape=None)\n a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)\n w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)\n b = np.arange(3)\n z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(\n w, b)]])\n a = a.reshape(1, 1, 2, 3)\n z = z.reshape(1, 3, 1, 3)\n n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),\n weight_shape=(3, 1, 2, 1), act_type='linear')\n n.enable_dropout()\n n.weights.value = w.reshape(3, 1, 2, 1)\n n.bias.value = b\n n.input.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.dropout.connect(dropout)\n n.transform()\n fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, 0.0)[0], z)\n self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)\n\n def test_RadialBasis(self):\n x = core.Input(name='x', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n n = nodes.RadialBasis(name='radial', input_shape=x.shape,\n output_shape=(None, 3))\n n.weights.value = w.reshape(2, 3)\n n.input.connect(x)\n n.transform()\n fx = util.compile(inputs=[x], outputs=n.outputs.values())\n z = np.power(a.reshape(2, 2, 1) - w.reshape(1, 2, 3), 2.0).sum(axis=1)\n np.testing.assert_equal(fx(a)[0], z)\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass NodeTests(unittest.TestCase):\n <mask token>\n\n def tearDown(self):\n pass\n\n def test_Node(self):\n pass\n <mask token>\n\n def test_Add(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n x2 = core.Input(name='x2', shape=(2, 2))\n n = nodes.Add(name='accumulate', num_inputs=2)\n n.input_0.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_1.connect(x2)\n self.assertIsNone(n.output.shape)\n n.transform()\n self.assertEqual(n.output.shape, (2, 2))\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n a = np.array([[3, -1], [3, 7]])\n b = np.array([[1, 2], [3, 4]])\n z = fx(a, b)[0]\n np.testing.assert_equal(z, np.array([[4, 1], [6, 11]]))\n\n @unittest.skip('Not fully implemented yet.')\n def test_Bincount(self):\n x1 = core.Input(name='x1', shape=(None,))\n n = nodes.Bincount(name='counter', max_int=3)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=[n.counts])\n a = np.array([3, 0, 3, 1])\n np.testing.assert_equal(n.counts.value, np.array([0, 0, 0, 0]))\n np.testing.assert_equal(fx(a)[0], np.array([1, 1, 0, 2]))\n np.testing.assert_equal(fx(a)[0], np.array([2, 2, 0, 4]))\n\n def test_Concatenate(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n x2 = core.Input(name='x2', shape=(2, 2))\n a = np.array([[3, -1], [3, 7]])\n b = np.array([[1, 2], [3, 4]])\n for axis in range(2):\n n = nodes.Concatenate(name='concatenate', num_inputs=2, axis=axis)\n n.input_0.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_1.connect(x2)\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n z = fx(a, b)[0]\n np.testing.assert_equal(z, np.concatenate([a, b], axis=axis))\n\n def test_Stack(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n x2 = core.Input(name='x2', shape=(2, 3))\n a = np.arange(6).reshape(2, 3)\n b = np.arange(6).reshape(2, 3) + 6\n for axes in (None, (1, 2, 0), (2, 1, 0)):\n n = nodes.Stack(name='stack', num_inputs=2, axes=axes)\n n.input_1.connect(x2)\n n.input_0.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n z = fx(a, b)[0]\n expected = np.array([a, b])\n if axes:\n expected = np.transpose(expected, axes)\n np.testing.assert_equal(z, expected)\n\n def test_Dimshuffle(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n a = np.zeros([2, 3])\n axes = [('x', 0, 1), (0, 1, 'x'), (1, 'x', 0)]\n shapes = [(1, 2, 3), (2, 3, 1), (3, 1, 2)]\n for ax, shp in zip(axes, shapes):\n n = nodes.Dimshuffle('dimshuffle', ax)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0].shape, shp)\n\n def test_Slice(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n a = np.arange(6).reshape(2, 3)\n slices = [(None, 1), (0, None), (1, 0)]\n ans = [a[:, 1], a[0, :], a[1, 0]]\n for slc, ans in zip(slices, ans):\n n = nodes.Slice('slice', slc)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], ans)\n <mask token>\n\n def test_Multiply(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [3, 7]])\n for w, shp in zip([-1, a], [None, a.shape]):\n n = nodes.Multiply(name='gain', weight_shape=shp)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], np.zeros_like(a))\n n.weight.value = w\n 
np.testing.assert_equal(fx(a)[0], w * a)\n n = nodes.Multiply(name='gain', weight_shape=(1, 2), broadcast=[0])\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], np.zeros_like(a))\n n.weight.value = a[0].reshape(1, -1)\n np.testing.assert_equal(fx(a)[0], a * a[0].reshape(1, -1))\n\n def test_Max(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 7, np.array([4, 7]), np.array([3, 7])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Max('max', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Min(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = -1, np.array([3, -1]), np.array([-1, 4])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Min('min', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Sum(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 13, np.array([7, 6]), np.array([2, 11])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Sum('sum', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Mean(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 13 / 4.0, np.array([7, 6]) / 2.0, np.array([2, 11]) / 2.0\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Mean('mean', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_NormalizeDim(self):\n x1 = core.Input(name='x1', shape=(1, 2, 3))\n a = np.array([[[3, 1, -1], [4, 0, 7]]], dtype=np.float32)\n expected = [np.sign(a), a / np.sqrt(np.array([25, 1, 50])).reshape(\n 1, 1, 3), a / np.sqrt(np.array([11, 65])).reshape(1, 2, 1)]\n for axis, ans in enumerate(expected):\n n = nodes.NormalizeDim('l2norm', axis=axis, mode='l2')\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_almost_equal(fx(a)[0], ans)\n\n def test_SelectIndex(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n idx = core.Input(name='idx', shape=(None,), dtype='int32')\n a = np.array([[3, -1], [4, 7]])\n i = np.array([1, 0])\n n = nodes.SelectIndex('select')\n n.input.connect(x1)\n n.index.connect(idx)\n n.transform()\n fx = util.compile(inputs=[x1, idx], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, i)[0], np.array([-1, 4]))\n\n def test_SquaredEuclidean(self):\n a1 = np.array([[3, -1], [4, 7]])\n b1 = np.array([[1, -1], [4, 7]])\n a2 = np.array([3, -1])\n b2 = np.array([1, -1])\n z1 = np.power(a1 - b1, 2.0).sum(axis=1)\n z2 = np.power(a2 - b2, 2.0).sum()\n for a, b, z in zip([a1, a2], [b1, b2], [z1, z2]):\n x1 = core.Input(name='x1', shape=a.shape)\n x2 = core.Input(name='x2', shape=b.shape)\n n = nodes.SquaredEuclidean('sqeuclid')\n n.input_a.connect(x1)\n n.input_b.connect(x2)\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, b)[0], z)\n\n def test_Product(self):\n a1 = np.array([[3, -1], [4, 7]])\n b1 = 
np.array([[1, -1], [4, 7]])\n a2 = np.array([3, -1])\n b2 = np.array([1, -1])\n for a, b in zip([a1, a2], [b1, b2]):\n x1 = core.Input(name='x1', shape=a.shape)\n x2 = core.Input(name='x2', shape=b.shape)\n n = nodes.Product('product')\n n.input_a.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_b.connect(x2)\n self.assertTrue(n.is_ready())\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, b)[0], a * b)\n\n def test_Affine_linear(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n b = np.ones(3)\n n = nodes.Affine(name='affine', input_shape=(None, 2), output_shape\n =(None, 3), act_type='linear')\n n.weights.value = w\n n.bias.value = b\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], np.dot(a, w) + b)\n\n def test_Affine_relu(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n b = np.ones(3)\n n = nodes.Affine(name='affine', input_shape=(None, 2), output_shape\n =(None, 3), act_type='relu')\n n.weights.value = w\n n.bias.value = b\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], __relu__(np.dot(a, w) + b))\n <mask token>\n <mask token>\n <mask token>\n\n def test_Conv3D_relu(self):\n x1 = core.Input(name='x1', shape=(None, 1, 2, 3))\n a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)\n w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)\n b = np.arange(3)\n z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(\n w, b)]])\n a = a.reshape(1, 1, 2, 3)\n z = z.reshape(1, 3, 1, 3)\n n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),\n weight_shape=(3, 1, 2, 1), act_type='relu')\n n.weights.value = w.reshape(3, 1, 2, 1)\n n.bias.value = b\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], __relu__(z))\n\n def test_Conv3D_dropout(self):\n x1 = core.Input(name='x1', shape=(None, 1, 2, 3))\n dropout = core.Input(name='dropout', shape=None)\n a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)\n w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)\n b = np.arange(3)\n z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(\n w, b)]])\n a = a.reshape(1, 1, 2, 3)\n z = z.reshape(1, 3, 1, 3)\n n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),\n weight_shape=(3, 1, 2, 1), act_type='linear')\n n.enable_dropout()\n n.weights.value = w.reshape(3, 1, 2, 1)\n n.bias.value = b\n n.input.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.dropout.connect(dropout)\n n.transform()\n fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, 0.0)[0], z)\n self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)\n\n def test_RadialBasis(self):\n x = core.Input(name='x', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n n = nodes.RadialBasis(name='radial', input_shape=x.shape,\n output_shape=(None, 3))\n n.weights.value = w.reshape(2, 3)\n n.input.connect(x)\n n.transform()\n fx = util.compile(inputs=[x], outputs=n.outputs.values())\n z = np.power(a.reshape(2, 2, 1) - 
w.reshape(1, 2, 3), 2.0).sum(axis=1)\n np.testing.assert_equal(fx(a)[0], z)\n <mask token>\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass NodeTests(unittest.TestCase):\n <mask token>\n\n def tearDown(self):\n pass\n\n def test_Node(self):\n pass\n\n def test_Constant(self):\n n = nodes.Constant(name='test', shape=None)\n n.data.value = 1.0\n n.transform()\n fx = util.compile(inputs=[], outputs=[n.output])\n np.testing.assert_equal(np.array(fx()[0]), 1.0)\n\n def test_Add(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n x2 = core.Input(name='x2', shape=(2, 2))\n n = nodes.Add(name='accumulate', num_inputs=2)\n n.input_0.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_1.connect(x2)\n self.assertIsNone(n.output.shape)\n n.transform()\n self.assertEqual(n.output.shape, (2, 2))\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n a = np.array([[3, -1], [3, 7]])\n b = np.array([[1, 2], [3, 4]])\n z = fx(a, b)[0]\n np.testing.assert_equal(z, np.array([[4, 1], [6, 11]]))\n\n @unittest.skip('Not fully implemented yet.')\n def test_Bincount(self):\n x1 = core.Input(name='x1', shape=(None,))\n n = nodes.Bincount(name='counter', max_int=3)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=[n.counts])\n a = np.array([3, 0, 3, 1])\n np.testing.assert_equal(n.counts.value, np.array([0, 0, 0, 0]))\n np.testing.assert_equal(fx(a)[0], np.array([1, 1, 0, 2]))\n np.testing.assert_equal(fx(a)[0], np.array([2, 2, 0, 4]))\n\n def test_Concatenate(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n x2 = core.Input(name='x2', shape=(2, 2))\n a = np.array([[3, -1], [3, 7]])\n b = np.array([[1, 2], [3, 4]])\n for axis in range(2):\n n = nodes.Concatenate(name='concatenate', num_inputs=2, axis=axis)\n n.input_0.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_1.connect(x2)\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n z = fx(a, b)[0]\n np.testing.assert_equal(z, np.concatenate([a, b], axis=axis))\n\n def test_Stack(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n x2 = core.Input(name='x2', shape=(2, 3))\n a = np.arange(6).reshape(2, 3)\n b = np.arange(6).reshape(2, 3) + 6\n for axes in (None, (1, 2, 0), (2, 1, 0)):\n n = nodes.Stack(name='stack', num_inputs=2, axes=axes)\n n.input_1.connect(x2)\n n.input_0.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=[n.output])\n z = fx(a, b)[0]\n expected = np.array([a, b])\n if axes:\n expected = np.transpose(expected, axes)\n np.testing.assert_equal(z, expected)\n\n def test_Dimshuffle(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n a = np.zeros([2, 3])\n axes = [('x', 0, 1), (0, 1, 'x'), (1, 'x', 0)]\n shapes = [(1, 2, 3), (2, 3, 1), (3, 1, 2)]\n for ax, shp in zip(axes, shapes):\n n = nodes.Dimshuffle('dimshuffle', ax)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0].shape, shp)\n\n def test_Slice(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n a = np.arange(6).reshape(2, 3)\n slices = [(None, 1), (0, None), (1, 0)]\n ans = [a[:, 1], a[0, :], a[1, 0]]\n for slc, ans in zip(slices, ans):\n n = nodes.Slice('slice', slc)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], ans)\n <mask token>\n\n def test_Multiply(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [3, 7]])\n for w, shp in zip([-1, a], [None, a.shape]):\n n = nodes.Multiply(name='gain', weight_shape=shp)\n 
n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], np.zeros_like(a))\n n.weight.value = w\n np.testing.assert_equal(fx(a)[0], w * a)\n n = nodes.Multiply(name='gain', weight_shape=(1, 2), broadcast=[0])\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], np.zeros_like(a))\n n.weight.value = a[0].reshape(1, -1)\n np.testing.assert_equal(fx(a)[0], a * a[0].reshape(1, -1))\n\n def test_Max(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 7, np.array([4, 7]), np.array([3, 7])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Max('max', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Min(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = -1, np.array([3, -1]), np.array([-1, 4])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Min('min', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Sum(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 13, np.array([7, 6]), np.array([2, 11])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Sum('sum', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Mean(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 13 / 4.0, np.array([7, 6]) / 2.0, np.array([2, 11]) / 2.0\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Mean('mean', axis=axis)\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_NormalizeDim(self):\n x1 = core.Input(name='x1', shape=(1, 2, 3))\n a = np.array([[[3, 1, -1], [4, 0, 7]]], dtype=np.float32)\n expected = [np.sign(a), a / np.sqrt(np.array([25, 1, 50])).reshape(\n 1, 1, 3), a / np.sqrt(np.array([11, 65])).reshape(1, 2, 1)]\n for axis, ans in enumerate(expected):\n n = nodes.NormalizeDim('l2norm', axis=axis, mode='l2')\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=n.inputs.values(), outputs=n.outputs.\n values())\n np.testing.assert_almost_equal(fx(a)[0], ans)\n\n def test_SelectIndex(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n idx = core.Input(name='idx', shape=(None,), dtype='int32')\n a = np.array([[3, -1], [4, 7]])\n i = np.array([1, 0])\n n = nodes.SelectIndex('select')\n n.input.connect(x1)\n n.index.connect(idx)\n n.transform()\n fx = util.compile(inputs=[x1, idx], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, i)[0], np.array([-1, 4]))\n\n def test_SquaredEuclidean(self):\n a1 = np.array([[3, -1], [4, 7]])\n b1 = np.array([[1, -1], [4, 7]])\n a2 = np.array([3, -1])\n b2 = np.array([1, -1])\n z1 = np.power(a1 - b1, 2.0).sum(axis=1)\n z2 = np.power(a2 - b2, 2.0).sum()\n for a, b, z in zip([a1, a2], [b1, b2], [z1, z2]):\n x1 = core.Input(name='x1', shape=a.shape)\n x2 = core.Input(name='x2', shape=b.shape)\n n = nodes.SquaredEuclidean('sqeuclid')\n n.input_a.connect(x1)\n n.input_b.connect(x2)\n 
n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, b)[0], z)\n\n def test_Product(self):\n a1 = np.array([[3, -1], [4, 7]])\n b1 = np.array([[1, -1], [4, 7]])\n a2 = np.array([3, -1])\n b2 = np.array([1, -1])\n for a, b in zip([a1, a2], [b1, b2]):\n x1 = core.Input(name='x1', shape=a.shape)\n x2 = core.Input(name='x2', shape=b.shape)\n n = nodes.Product('product')\n n.input_a.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_b.connect(x2)\n self.assertTrue(n.is_ready())\n n.transform()\n fx = util.compile(inputs=[x1, x2], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, b)[0], a * b)\n\n def test_Affine_linear(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n b = np.ones(3)\n n = nodes.Affine(name='affine', input_shape=(None, 2), output_shape\n =(None, 3), act_type='linear')\n n.weights.value = w\n n.bias.value = b\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], np.dot(a, w) + b)\n\n def test_Affine_relu(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n b = np.ones(3)\n n = nodes.Affine(name='affine', input_shape=(None, 2), output_shape\n =(None, 3), act_type='relu')\n n.weights.value = w\n n.bias.value = b\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], __relu__(np.dot(a, w) + b))\n <mask token>\n <mask token>\n <mask token>\n\n def test_Conv3D_relu(self):\n x1 = core.Input(name='x1', shape=(None, 1, 2, 3))\n a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)\n w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)\n b = np.arange(3)\n z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(\n w, b)]])\n a = a.reshape(1, 1, 2, 3)\n z = z.reshape(1, 3, 1, 3)\n n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),\n weight_shape=(3, 1, 2, 1), act_type='relu')\n n.weights.value = w.reshape(3, 1, 2, 1)\n n.bias.value = b\n n.input.connect(x1)\n n.transform()\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], __relu__(z))\n\n def test_Conv3D_dropout(self):\n x1 = core.Input(name='x1', shape=(None, 1, 2, 3))\n dropout = core.Input(name='dropout', shape=None)\n a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)\n w = np.array([[[1], [-2]], [[-3], [4]], [[5], [-6]]]).reshape(3, 2, 1)\n b = np.arange(3)\n z = np.array([[((a * wi[::-1]).sum(axis=0) + bi) for wi, bi in zip(\n w, b)]])\n a = a.reshape(1, 1, 2, 3)\n z = z.reshape(1, 3, 1, 3)\n n = nodes.Conv3D(name='conv3d', input_shape=(None, 1, 2, 3),\n weight_shape=(3, 1, 2, 1), act_type='linear')\n n.enable_dropout()\n n.weights.value = w.reshape(3, 1, 2, 1)\n n.bias.value = b\n n.input.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.dropout.connect(dropout)\n n.transform()\n fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, 0.0)[0], z)\n self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)\n\n def test_RadialBasis(self):\n x = core.Input(name='x', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n n = nodes.RadialBasis(name='radial', input_shape=x.shape,\n 
output_shape=(None, 3))\n n.weights.value = w.reshape(2, 3)\n n.input.connect(x)\n n.transform()\n fx = util.compile(inputs=[x], outputs=n.outputs.values())\n z = np.power(a.reshape(2, 2, 1) - w.reshape(1, 2, 3), 2.0).sum(axis=1)\n np.testing.assert_equal(fx(a)[0], z)\n\n def test_SliceGT(self):\n x = core.Input(name='x', shape=(None,))\n n = nodes.SliceGT(name='slice-greater', value=0)\n n.input.connect(x)\n n.transform()\n fx = util.compile(inputs=[x], outputs=n.outputs.values())\n a = np.array([1, -2, 0])\n np.testing.assert_equal(fx(a)[0], np.array([1]))\n\n\n<mask token>\n",
"step-5": "\"\"\"Tests for Node objects.\"\"\"\n\nimport numpy as np\nimport unittest\n\nimport optimus.core as core\nimport optimus.nodes as nodes\nimport optimus.util as util\n\n\ndef __relu__(x):\n \"Numpy Rectified Linear Unit.\"\n return 0.5 * (np.abs(x) + x)\n\n\nclass NodeTests(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def test_Node(self):\n pass\n\n def test_Constant(self):\n n = nodes.Constant(name='test', shape=None)\n n.data.value = 1.0\n\n n.transform()\n fx = util.compile(inputs=[], outputs=[n.output])\n\n np.testing.assert_equal(np.array(fx()[0]), 1.0)\n\n def test_Add(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n x2 = core.Input(name='x2', shape=(2, 2))\n\n n = nodes.Add(name='accumulate', num_inputs=2)\n n.input_0.connect(x1)\n\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n\n n.input_1.connect(x2)\n self.assertIsNone(n.output.shape)\n n.transform()\n self.assertEqual(n.output.shape, (2, 2))\n\n fx = util.compile(inputs=[x1, x2],\n outputs=[n.output])\n a = np.array([[3, -1], [3, 7]])\n b = np.array([[1, 2], [3, 4]])\n\n z = fx(a, b)[0]\n np.testing.assert_equal(z, np.array([[4, 1], [6, 11]]))\n\n @unittest.skip(\"Not fully implemented yet.\")\n def test_Bincount(self):\n x1 = core.Input(name='x1', shape=(None,))\n\n n = nodes.Bincount(name='counter', max_int=3)\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=[x1], outputs=[n.counts])\n a = np.array([3, 0, 3, 1])\n\n np.testing.assert_equal(n.counts.value, np.array([0, 0, 0, 0]))\n np.testing.assert_equal(fx(a)[0], np.array([1, 1, 0, 2]))\n np.testing.assert_equal(fx(a)[0], np.array([2, 2, 0, 4]))\n\n def test_Concatenate(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n x2 = core.Input(name='x2', shape=(2, 2))\n a = np.array([[3, -1], [3, 7]])\n b = np.array([[1, 2], [3, 4]])\n\n for axis in range(2):\n n = nodes.Concatenate(name='concatenate', num_inputs=2, axis=axis)\n n.input_0.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_1.connect(x2)\n n.transform()\n\n fx = util.compile(inputs=[x1, x2],\n outputs=[n.output])\n\n z = fx(a, b)[0]\n np.testing.assert_equal(z, np.concatenate([a, b], axis=axis))\n\n def test_Stack(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n x2 = core.Input(name='x2', shape=(2, 3))\n a = np.arange(6).reshape(2, 3)\n b = np.arange(6).reshape(2, 3) + 6\n\n for axes in None, (1, 2, 0), (2, 1, 0):\n n = nodes.Stack(name='stack', num_inputs=2, axes=axes)\n n.input_1.connect(x2)\n n.input_0.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=[x1, x2],\n outputs=[n.output])\n\n z = fx(a, b)[0]\n expected = np.array([a, b])\n if axes:\n expected = np.transpose(expected, axes)\n np.testing.assert_equal(z, expected)\n\n def test_Dimshuffle(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n a = np.zeros([2, 3])\n axes = [('x', 0, 1), (0, 1, 'x'), (1, 'x', 0)]\n shapes = [(1, 2, 3), (2, 3, 1), (3, 1, 2)]\n for ax, shp in zip(axes, shapes):\n n = nodes.Dimshuffle('dimshuffle', ax)\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=n.inputs.values(),\n outputs=n.outputs.values())\n\n np.testing.assert_equal(fx(a)[0].shape, shp)\n\n def test_Slice(self):\n x1 = core.Input(name='x1', shape=(2, 3))\n a = np.arange(6).reshape(2, 3)\n slices = [(None, 1), (0, None), (1, 0)]\n ans = [a[:, 1], a[0, :], a[1, 0]]\n for slc, ans in zip(slices, ans):\n n = nodes.Slice('slice', slc)\n n.input.connect(x1)\n n.transform()\n\n fx = 
util.compile(inputs=n.inputs.values(),\n outputs=n.outputs.values())\n\n np.testing.assert_equal(fx(a)[0], ans)\n\n def test_Log(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n log = nodes.Log('log')\n log.input.connect(x1)\n log.transform()\n\n fx = util.compile(inputs=log.inputs.values(),\n outputs=log.outputs.values())\n\n a = np.array([[3, 1], [4, 7]], dtype=np.float32)\n z = fx(a)[0]\n np.testing.assert_almost_equal(z, np.log(a))\n\n def test_Multiply(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [3, 7]])\n\n for w, shp in zip([-1, a], [None, a.shape]):\n n = nodes.Multiply(name='gain', weight_shape=shp)\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=n.inputs.values(),\n outputs=n.outputs.values())\n\n np.testing.assert_equal(fx(a)[0], np.zeros_like(a))\n\n n.weight.value = w\n np.testing.assert_equal(fx(a)[0], w*a)\n\n n = nodes.Multiply(name='gain', weight_shape=(1, 2), broadcast=[0])\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=n.inputs.values(),\n outputs=n.outputs.values())\n\n np.testing.assert_equal(fx(a)[0], np.zeros_like(a))\n\n n.weight.value = a[0].reshape(1, -1)\n np.testing.assert_equal(fx(a)[0], a*a[0].reshape(1, -1))\n\n def test_Max(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 7, np.array([4, 7]), np.array([3, 7])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Max('max', axis=axis)\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=n.inputs.values(),\n outputs=n.outputs.values())\n\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Min(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = -1, np.array([3, -1]), np.array([-1, 4])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Min('min', axis=axis)\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=n.inputs.values(),\n outputs=n.outputs.values())\n\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Sum(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 13, np.array([7, 6]), np.array([2, 11])\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Sum('sum', axis=axis)\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=n.inputs.values(),\n outputs=n.outputs.values())\n\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_Mean(self):\n x1 = core.Input(name='x1', shape=(2, 2))\n a = np.array([[3, -1], [4, 7]])\n res = 13 / 4.0, np.array([7, 6]) / 2.0, np.array([2, 11]) / 2.0\n for idx, axis in enumerate([None, 0, 1]):\n n = nodes.Mean('mean', axis=axis)\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=n.inputs.values(),\n outputs=n.outputs.values())\n\n np.testing.assert_equal(fx(a)[0], res[idx])\n\n def test_NormalizeDim(self):\n x1 = core.Input(name='x1', shape=(1, 2, 3))\n a = np.array([[[3, 1, -1], [4, 0, 7]]], dtype=np.float32)\n expected = [np.sign(a),\n a / np.sqrt(np.array([25, 1, 50])).reshape(1, 1, 3),\n a / np.sqrt(np.array([11, 65])).reshape(1, 2, 1)]\n for axis, ans in enumerate(expected):\n n = nodes.NormalizeDim('l2norm', axis=axis, mode='l2')\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=n.inputs.values(),\n outputs=n.outputs.values())\n np.testing.assert_almost_equal(fx(a)[0], ans)\n\n def test_SelectIndex(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n idx = core.Input(name='idx', shape=(None,), dtype='int32')\n a = np.array([[3, -1], [4, 7]])\n i = np.array([1, 0])\n\n n = 
nodes.SelectIndex('select')\n n.input.connect(x1)\n n.index.connect(idx)\n n.transform()\n\n fx = util.compile(inputs=[x1, idx],\n outputs=n.outputs.values())\n\n np.testing.assert_equal(fx(a, i)[0], np.array([-1, 4]))\n\n def test_SquaredEuclidean(self):\n a1 = np.array([[3, -1], [4, 7]])\n b1 = np.array([[1, -1], [4, 7]])\n a2 = np.array([3, -1])\n b2 = np.array([1, -1])\n\n z1 = np.power(a1 - b1, 2.0).sum(axis=1)\n z2 = np.power(a2 - b2, 2.0).sum()\n for a, b, z in zip([a1, a2], [b1, b2], [z1, z2]):\n x1 = core.Input(name='x1', shape=a.shape)\n x2 = core.Input(name='x2', shape=b.shape)\n n = nodes.SquaredEuclidean('sqeuclid')\n n.input_a.connect(x1)\n n.input_b.connect(x2)\n n.transform()\n\n fx = util.compile(inputs=[x1, x2],\n outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, b)[0], z)\n\n def test_Product(self):\n a1 = np.array([[3, -1], [4, 7]])\n b1 = np.array([[1, -1], [4, 7]])\n a2 = np.array([3, -1])\n b2 = np.array([1, -1])\n\n for a, b in zip([a1, a2], [b1, b2]):\n x1 = core.Input(name='x1', shape=a.shape)\n x2 = core.Input(name='x2', shape=b.shape)\n n = nodes.Product('product')\n n.input_a.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.input_b.connect(x2)\n self.assertTrue(n.is_ready())\n n.transform()\n\n fx = util.compile(inputs=[x1, x2],\n outputs=n.outputs.values())\n np.testing.assert_equal(fx(a, b)[0], a*b)\n\n def test_Affine_linear(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n b = np.ones(3)\n\n n = nodes.Affine(\n name='affine',\n input_shape=(None, 2),\n output_shape=(None, 3),\n act_type='linear')\n n.weights.value = w\n n.bias.value = b\n\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], np.dot(a, w) + b)\n\n def test_Affine_relu(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n b = np.ones(3)\n\n n = nodes.Affine(\n name='affine',\n input_shape=(None, 2),\n output_shape=(None, 3),\n act_type='relu')\n n.weights.value = w\n n.bias.value = b\n\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], __relu__(np.dot(a, w) + b))\n\n def test_Affine_dropout(self):\n x1 = core.Input(name='x1', shape=(None, 2))\n dropout = core.Input(name='dropout', shape=None)\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n b = np.ones(3)\n\n n = nodes.Affine(\n name='affine',\n input_shape=(None, 2),\n output_shape=(None, 3),\n act_type='linear')\n n.weights.value = w\n n.bias.value = b\n n.enable_dropout()\n\n n.input.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.dropout.connect(dropout)\n n.transform()\n\n fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())\n\n np.testing.assert_equal(fx(a, 0.0)[0], np.dot(a, w) + b)\n self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)\n\n def test_Affine_share_params(self):\n x = core.Input(name='x1', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n b = np.ones(3)\n\n n1 = nodes.Affine(\n name='affine',\n input_shape=(None, 2),\n output_shape=(None, 3),\n act_type='linear')\n\n n2 = nodes.Affine(\n name='affine_copy',\n input_shape=(None, 2),\n output_shape=(None, 3),\n act_type='linear')\n\n 
n2.share_params(n1)\n\n n1.weights.value = w\n n1.bias.value = b\n\n np.testing.assert_equal(n1.weights.value, n2.weights.value)\n np.testing.assert_equal(n1.bias.value, n2.bias.value)\n\n n2.input.connect(x)\n n2.transform()\n\n fx = util.compile(inputs=[x], outputs=n2.outputs.values())\n np.testing.assert_equal(fx(a)[0], np.dot(a, w) + b)\n\n n1.weights.value *= 2\n np.testing.assert_equal(fx(a)[0], np.dot(a, 2*w) + b)\n\n def test_Conv3D_linear(self):\n x1 = core.Input(name='x1', shape=(None, 1, 2, 3))\n a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)\n w = np.array([[[1], [-2]],\n [[-3], [4]],\n [[5], [-6]]]).reshape(3, 2, 1)\n b = np.arange(3)\n\n # Note that convolutions flip the kernels\n z = np.array([[(a*wi[::-1]).sum(axis=0) + bi\n for wi, bi in zip(w, b)]])\n\n n = nodes.Conv3D(\n name='conv3d',\n input_shape=(None, 1, 2, 3),\n weight_shape=(3, 1, 2, 1),\n act_type='linear')\n\n n.weights.value = w.reshape(3, 1, 2, 1)\n n.bias.value = b\n\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a.reshape(1, 1, 2, 3))[0],\n z.reshape(1, 3, 1, 3))\n\n def test_Conv3D_relu(self):\n x1 = core.Input(name='x1', shape=(None, 1, 2, 3))\n a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)\n w = np.array([[[1], [-2]],\n [[-3], [4]],\n [[5], [-6]]]).reshape(3, 2, 1)\n b = np.arange(3)\n\n # Note that convolutions flip the kernels\n z = np.array([[(a*wi[::-1]).sum(axis=0) + bi\n for wi, bi in zip(w, b)]])\n\n # Reshape from convenience\n a = a.reshape(1, 1, 2, 3)\n z = z.reshape(1, 3, 1, 3)\n\n n = nodes.Conv3D(\n name='conv3d',\n input_shape=(None, 1, 2, 3),\n weight_shape=(3, 1, 2, 1),\n act_type='relu')\n\n n.weights.value = w.reshape(3, 1, 2, 1)\n n.bias.value = b\n\n n.input.connect(x1)\n n.transform()\n\n fx = util.compile(inputs=[x1], outputs=n.outputs.values())\n np.testing.assert_equal(fx(a)[0], __relu__(z))\n\n def test_Conv3D_dropout(self):\n x1 = core.Input(name='x1', shape=(None, 1, 2, 3))\n dropout = core.Input(name='dropout', shape=None)\n a = np.array([[3, -1], [4, 7], [2, -6]]).reshape(2, 3)\n w = np.array([[[1], [-2]],\n [[-3], [4]],\n [[5], [-6]]]).reshape(3, 2, 1)\n b = np.arange(3)\n\n # Note that convolutions flip the kernels\n z = np.array([[(a*wi[::-1]).sum(axis=0) + bi\n for wi, bi in zip(w, b)]])\n\n # Reshape from convenience\n a = a.reshape(1, 1, 2, 3)\n z = z.reshape(1, 3, 1, 3)\n\n n = nodes.Conv3D(\n name='conv3d',\n input_shape=(None, 1, 2, 3),\n weight_shape=(3, 1, 2, 1),\n act_type='linear')\n\n n.enable_dropout()\n n.weights.value = w.reshape(3, 1, 2, 1)\n n.bias.value = b\n\n n.input.connect(x1)\n with self.assertRaises(nodes.UnconnectedNodeError):\n n.transform()\n n.dropout.connect(dropout)\n n.transform()\n\n fx = util.compile(inputs=[x1, dropout], outputs=n.outputs.values())\n\n np.testing.assert_equal(fx(a, 0.0)[0], z)\n self.assertGreaterEqual(np.equal(fx(a, 0.9)[0], 0.0).sum(), 1)\n\n def test_RadialBasis(self):\n x = core.Input(name='x', shape=(None, 2))\n a = np.array([[3, -1], [4, 7]])\n w = np.array([[1, -1], [2, -2], [3, -3]]).T\n\n n = nodes.RadialBasis(\n name='radial',\n input_shape=x.shape,\n output_shape=(None, 3))\n n.weights.value = w.reshape(2, 3)\n n.input.connect(x)\n n.transform()\n\n fx = util.compile(inputs=[x], outputs=n.outputs.values())\n z = np.power(a.reshape(2, 2, 1) - w.reshape(1, 2, 3),\n 2.0).sum(axis=1)\n np.testing.assert_equal(fx(a)[0], z)\n\n def test_SliceGT(self):\n x = core.Input(name='x', shape=(None,))\n\n n = 
nodes.SliceGT(name='slice-greater', value=0)\n n.input.connect(x)\n n.transform()\n\n fx = util.compile(inputs=[x], outputs=n.outputs.values())\n a = np.array([1, -2, 0])\n np.testing.assert_equal(fx(a)[0], np.array([1]))\n\n\nif __name__ == \"__main__\":\n unittest.main()\n",
"step-ids": [
19,
20,
23,
25,
34
]
}
|
[
19,
20,
23,
25,
34
] |
rate=69
dollar=int(input("enter dollars to convert:"))
inr=dollar*rate
print('INR :Rs.',inr,'/-')
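# Worked example (illustrative, not part of the original snippet): entering
# 10 at the prompt gives inr = 10 * 69 = 690 and prints "INR :Rs. 690 /-".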
|
normal
|
{
"blob_id": "62018b32bf0c66fa7ec3cc0fcbdc16e28b4ef2d6",
"index": 2396,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('INR :Rs.', inr, '/-')\n",
"step-3": "rate = 69\ndollar = int(input('enter an dollars to convert:'))\ninr = dollar * rate\nprint('INR :Rs.', inr, '/-')\n",
"step-4": "rate=69\ndollar=int(input(\"enter an dollars to convert:\"))\ninr=dollar*rate\nprint('INR :Rs.',inr,'/-')",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class OptionsManager(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
logging.basicConfig(format=format, level=logging.INFO, datefmt='%H:%M:%S')
logging.getLogger().setLevel(logging.DEBUG)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def save_to_file(self):
"""Save options to settings file.
"""
check_path(self.config_path)
with open(self.settings_file, 'w') as settings_file:
options = self._get_options()
json.dump(options, settings_file, indent=4, separators=(',', ': '))
def _settings_coordinate(self, settings_dict):
"""
Check settings.json dictionary
Args:
settings_dict: Options dict loaded. See load_from_file() method.
Return:
True if settings.json is valid / else False
"""
VALID_VIDEO_FORMAT = ('0', '17', '36', '5', '34', '35', '43', '44',
'45', '46', '18', '22', '37', '38', '160', '133', '134', '135',
'136', '137', '264', '138', '242', '243', '244', '247', '248',
'271', '272', '82', '83', '84', '85', '100', '101', '102',
'139', '140', '141', '171', '172')
VALID_AUDIO_FORMAT = 'mp3', 'wav', 'aac', 'm4a', 'vorbis', 'opus', ''
VALID_AUDIO_QUALITY = '0', '5', '9'
VALID_FILESIZE_UNIT = '', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y'
VALID_SUB_LANGUAGE = ('en', 'el', 'pt', 'fr', 'it', 'ru', 'es',
'de', 'he', 'sv', 'tr')
MIN_FRAME_SIZE = 100
for key in self.options:
if key not in settings_dict:
return False
if type(self.options[key]) != type(settings_dict[key]):
return False
rules_dict = {'video_format': FORMATS.keys(), 'second_video_format':
VALID_VIDEO_FORMAT, 'audio_format': VALID_AUDIO_FORMAT,
'audio_quality': VALID_AUDIO_QUALITY, 'output_format':
OUTPUT_FORMATS.keys(), 'min_filesize_unit': VALID_FILESIZE_UNIT,
'max_filesize_unit': VALID_FILESIZE_UNIT, 'subs_lang':
VALID_SUB_LANGUAGE}
for key, valid_list in rules_dict.items():
if settings_dict[key] not in valid_list:
return False
if settings_dict['workers_number'] < 1:
return False
return True
def _get_options(self):
"""
Return options dictionary.
"""
tmp_options = self.options.copy()
for key in self.SENSITIVE_KEYS:
tmp_options[key] = ''
return tmp_options
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class OptionsManager(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
logging.basicConfig(format=format, level=logging.INFO, datefmt='%H:%M:%S')
logging.getLogger().setLevel(logging.DEBUG)
<|reserved_special_token_0|>
def load_default(self):
"""Load the default options.
Note:
This method is automatically called by the constructor.
Options Description:
'save_path' (string): Path where youtube-dl should store
the downloaded file. Default is $HOME (~\\Downloads)
'save_path_dirs' (list): List that contains temporary save paths.
'video_format' (string): Video format to download.
When this option is '0' youtube-dl will choose
the best video format for the given URL.
'second_video_format' (string): Video format to mix with the
first one (-f 18+17)
'to_audio' (boolean): If True youtube-dl will post process the
video file.
'keep_video' (boolean): If True youtube-dl will keep the video
after post processing it.
'audio_format' (string): Audio format of the post processed file.
values are: mp3, wav, aac, m4a, vorbis, opus.
'audio_quality' (string): Audio quality of the post processed file.
values are: 9, 5, 0. The lowest value the better the quality.
'restrict_filenames' (boolean): If True youtube-dl will restrict
the downloaded file filename to ASCII characters only.
'output_format' (int): This option sets the downloaded file
output template. See formats.OUTPUT_FORMATS for more info.
'output_template' (string): Can be any output template supported
by youtube-dl
'playlist_start' (int): Playlist index to start downloading.
'playlist_end' (int): Playlist index to stop downloading.
'max_downloads' (int): Maximum number of video files to download
from the given playlist.
'min_filesize' (float): Min file size of the video file.
if the video is smaller than the given size then
youtube-dl will abort the download process.
'max_filesize' (float): Max file size of the video file.
if the video is larger than the given size then
youtube-dl will abort the download process.
'min_filesize_unit' (string): Minimum file size unit.
values are: '', k, m, g, t, p, e, z, y.
'max_filesize_unit' (string): Maximum file size unit.
values are: '', k, m, g, t, p, e, z, y.
'write_subs' (boolean): If True youtube-dl will try to download
the subtitles file for the given URL.
'write_all_subs' (boolean): If True youtube-dl will try to download
all the available subtitles for the given URL.
'write_auto_subs' (boolean): If True youtube-dl will try to download
the automatic subtitles file for the given URL.
'embed_subs' (boolean): If True youtube-dl will try to merge the
subtitles file with the video. (ONLY mp4 files)
'subs_lang' (string): Language of the subtitles file to download.
Needs 'write_subs' option.
'ignore_errors' (boolean): If True youtube-dl will ignore errors
and continue the download process.
'open_dl_dir' (boolean): If True youtube-dl will open the destination
folder after the download process has been completed.
'write_description' (boolean): If True youtube-dl will write the video
description to a *.description file.
'write_info' (boolean): If True youtube-dl will write
video metadata to a *.info.json file.
'write_thumbnail' (boolean): If True youtube-dl will write a
thumbnail image to disk.
'retries' (int): Number of youtube-dl retries.
'user_agent' (string): Specify a custom user agent for youtube-dl.
'referer' (string): Specify a custom referer to use if the video
access is restricted to one domain.
'proxy' (string): Use the specified HTTP/HTTPS proxy.
'shutdown' (boolean): Shutdown PC after download process completed.
'sudo_password' (string): SUDO password for the shutdown process
if the user does not have elevated privileges.
'username' (string): Username to login with.
'password' (string): Password to login with.
'video_password' (string): Video Password for the given URL.
'youtubedl_path' (string): Absolute path to the youtube-dl binary.
Default is self.config_path. You can change this path
to point anywhere if you want to use the youtube-dl binary on your system.
This is also the directory where youtube-dlg will auto-download
youtube-dl if it does not exist, so you should make sure you have write access
if you want to update the youtube-dl binary from within youtube-dlg.
'cmd_args' (string): String that contains extra youtube-dl options
separated by spaces.
'enable_log' (boolean): If True youtube-dlg will enable
the LogManager, see main() function under __init__().
'log_time' (boolean): See logmanager.LogManager add_time attribute.
'workers_number' (int): Number of download workers that download manager
will spawn. Must be greater than zero.
'locale_name' (string): Locale name (en_US)
'main_win_size' (tuple): Main window size (width x height).
if the window becomes too small the program will reset its size.
see the _settings_coordinate method and MIN_FRAME_SIZE.
'opts_win_size' (tuple): Options window size (width x height).
'selected_video_formats' (list): List that contains the selected
video formats to display on the main window
'selected_audio_formats' (list): List that contains the selected
audio formats to display on the main window
'selected_format' (string): Current format selected on the main window
'youtube_dl_debug' (boolean): When True will pass the '-v' flag to youtube-dl.
'ignore_config' (boolean): When True will ignore youtube-dl config file options.
'confirm_exit' (boolean): When True create a message to confirm exit of youtube-dlg.
'native_hls' (boolean): When True youtube-dl will use the native HLS implementation.
'show_completion_popup' (boolean): When True youtube-dlg will create a message to inform
the user of the download completion.
'confirm_deletion' (boolean): When True ask user before item removal.
'nomtime' (boolean): When True will not use the last-modified header to
set the file modification time.
'embed_thumbnail' (boolean): When True will embed the thumbnail in
the audio file as cover art.
'add_metadata' (boolean): When True will write metadata to file.
"""
logging.debug('load_options default___________________')
self.options = {'save_path': os.path.expanduser('~'),
'save_path_dirs': [os.path.expanduser('~'), os.path.join(os.
path.expanduser('~'), 'Downloads'), os.path.join(os.path.
expanduser('~'), 'Desktop'), os.path.join(os.path.expanduser(
'~'), 'Videos'), os.path.join(os.path.expanduser('~'), 'Music')
], 'video_format': '0', 'second_video_format': '0', 'to_audio':
False, 'keep_video': False, 'audio_format': '', 'audio_quality':
'5', 'restrict_filenames': False, 'output_format': 1,
'output_template': os.path.join('%(uploader)s',
'%(title)s.%(ext)s'), 'playlist_start': 1, 'playlist_end': 0,
'max_downloads': 0, 'min_filesize': 0, 'max_filesize': 0,
'min_filesize_unit': '', 'max_filesize_unit': '', 'write_subs':
True, 'write_all_subs': False, 'write_auto_subs': False,
'embed_subs': False, 'subs_lang': 'en', 'ignore_errors': True,
'open_dl_dir': False, 'write_description': False, 'write_info':
False, 'write_thumbnail': False, 'retries': 10, 'user_agent':
'', 'referer': '', 'proxy': '', 'shutdown': False,
'sudo_password': '', 'username': '', 'password': '',
'video_password': '', 'youtubedl_path': self.config_path,
'cmd_args': '', 'enable_log': True, 'log_time': True,
'workers_number': 3, 'locale_name': 'en_US', 'main_win_size': (
740, 490), 'opts_win_size': (640, 490),
'selected_video_formats': ['default', 'mp4', 'webm'],
'selected_audio_formats': ['mp3', 'm4a', 'vorbis'],
'selected_format': '0', 'youtube_dl_debug': False,
'ignore_config': True, 'confirm_exit': True, 'native_hls': True,
'show_completion_popup': True, 'confirm_deletion': True,
'nomtime': False, 'embed_thumbnail': False, 'add_metadata': False}
def load_from_file(self):
"""
Load options from settings file.
"""
if not os.path.exists(self.settings_file):
return
with open(self.settings_file, 'rb') as settings_file:
try:
options = json.load(settings_file)
if self._settings_coordinate(options):
self.options = options
except:
self.load_default()
def save_to_file(self):
"""Save options to settings file.
"""
check_path(self.config_path)
with open(self.settings_file, 'w') as settings_file:
options = self._get_options()
json.dump(options, settings_file, indent=4, separators=(',', ': '))
def _settings_coordinate(self, settings_dict):
"""
Check settings.json dictionary
Args:
settings_dict: Options dict loaded. See load_from_file() method.
Return:
True if settings.json is valid / else False
"""
VALID_VIDEO_FORMAT = ('0', '17', '36', '5', '34', '35', '43', '44',
'45', '46', '18', '22', '37', '38', '160', '133', '134', '135',
'136', '137', '264', '138', '242', '243', '244', '247', '248',
'271', '272', '82', '83', '84', '85', '100', '101', '102',
'139', '140', '141', '171', '172')
VALID_AUDIO_FORMAT = 'mp3', 'wav', 'aac', 'm4a', 'vorbis', 'opus', ''
VALID_AUDIO_QUALITY = '0', '5', '9'
VALID_FILESIZE_UNIT = '', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y'
VALID_SUB_LANGUAGE = ('en', 'el', 'pt', 'fr', 'it', 'ru', 'es',
'de', 'he', 'sv', 'tr')
MIN_FRAME_SIZE = 100
for key in self.options:
if key not in settings_dict:
return False
if type(self.options[key]) != type(settings_dict[key]):
return False
rules_dict = {'video_format': FORMATS.keys(), 'second_video_format':
VALID_VIDEO_FORMAT, 'audio_format': VALID_AUDIO_FORMAT,
'audio_quality': VALID_AUDIO_QUALITY, 'output_format':
OUTPUT_FORMATS.keys(), 'min_filesize_unit': VALID_FILESIZE_UNIT,
'max_filesize_unit': VALID_FILESIZE_UNIT, 'subs_lang':
VALID_SUB_LANGUAGE}
for key, valid_list in rules_dict.items():
if settings_dict[key] not in valid_list:
return False
if settings_dict['workers_number'] < 1:
return False
return True
def _get_options(self):
"""
Return options dictionary.
"""
tmp_options = self.options.copy()
for key in self.SENSITIVE_KEYS:
tmp_options[key] = ''
return tmp_options
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class OptionsManager(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
logging.basicConfig(format=format, level=logging.INFO, datefmt='%H:%M:%S')
logging.getLogger().setLevel(logging.DEBUG)
def __init__(self, config_path):
self.config_path = config_path
self.settings_file = os.path.join(config_path, self.SETTINGS_FILENAME)
self.options = dict()
self.load_default()
self.load_from_file()
def load_default(self):
"""Load the default options.
Note:
This method is automatically called by the constructor.
Options Description:
'save_path' (string): Path where youtube-dl should store
the downloaded file. Default is $HOME (~\\Downloads)
'save_path_dirs' (list): List that contains temporary save paths.
'video_format' (string): Video format to download.
When this option is '0' youtube-dl will choose
the best video format for the given URL.
'second_video_format' (string): Video format to mix with the
first one (-f 18+17)
'to_audio' (boolean): If True youtube-dl will post process the
video file.
'keep_video' (boolean): If True youtube-dl will keep the video
after post processing it.
'audio_format' (string): Audio format of the post processed file.
values are: mp3, wav, aac, m4a, vorbis, opus.
'audio_quality' (string): Audio quality of the post processed file.
values are: 9, 5, 0. The lowest value the better the quality.
'restrict_filenames' (boolean): If True youtube-dl will restrict
the downloaded file filename to ASCII characters only.
'output_format' (int): This option sets the downloaded file
output template. See formats.OUTPUT_FORMATS for more info.
'output_template' (string): Can be any output template supported
by youtube-dl
'playlist_start' (int): Playlist index to start downloading.
'playlist_end' (int): Playlist index to stop downloading.
'max_downloads' (int): Maximum number of video files to download
from the given playlist.
'min_filesize' (float): Min file size of the video file.
if the video is smaller than the given size then
youtube-dl will abort the download process.
'max_filesize' (float): Max file size of the video file.
if the video is larger than the given size then
youtube-dl will abort the download process.
'min_filesize_unit' (string): Minimum file size unit.
values are: '', k, m, g, t, p, e, z, y.
'max_filesize_unit' (string): Maximum file size unit.
values are: '', k, m, g, t, p, e, z, y.
'write_subs' (boolean): If True youtube-dl will try to download
the subtitles file for the given URL.
'write_all_subs' (boolean): If True youtube-dl will try to download
all the available subtitles for the given URL.
'write_auto_subs' (boolean): If True youtube-dl will try to download
the automatic subtitles file for the given URL.
'embed_subs' (boolean): If True youtube-dl will try to merge the
subtitles file with the video. (ONLY mp4 files)
'subs_lang' (string): Language of the subtitles file to download.
Needs 'write_subs' option.
'ignore_errors' (boolean): If True youtube-dl will ignore errors
and continue the download process.
'open_dl_dir' (boolean): If True youtube-dl will open the destination
folder after the download process has been completed.
'write_description' (boolean): If True youtube-dl will write the video
description to a *.description file.
'write_info' (boolean): If True youtube-dl will write
video metadata to a *.info.json file.
'write_thumbnail' (boolean): If True youtube-dl will write a
thumbnail image to disk.
'retries' (int): Number of youtube-dl retries.
'user_agent' (string): Specify a custom user agent for youtube-dl.
'referer' (string): Specify a custom referer to use if the video
access is restricted to one domain.
'proxy' (string): Use the specified HTTP/HTTPS proxy.
'shutdown' (boolean): Shutdown PC after download process completed.
'sudo_password' (string): SUDO password for the shutdown process
if the user does not have elevated privileges.
'username' (string): Username to login with.
'password' (string): Password to login with.
'video_password' (string): Video Password for the given URL.
'youtubedl_path' (string): Absolute path to the youtube-dl binary.
Default is self.config_path. You can change this path
to point anywhere if you want to use the youtube-dl binary on your system.
This is also the directory where youtube-dlg will auto-download
youtube-dl if it does not exist, so you should make sure you have write access
if you want to update the youtube-dl binary from within youtube-dlg.
'cmd_args' (string): String that contains extra youtube-dl options
separated by spaces.
'enable_log' (boolean): If True youtube-dlg will enable
the LogManager, see main() function under __init__().
'log_time' (boolean): See logmanager.LogManager add_time attribute.
'workers_number' (int): Number of download workers that download manager
will spawn. Must be greater than zero.
'locale_name' (string): Locale name (en_US)
'main_win_size' (tuple): Main window size (width x height).
if the window becomes too small the program will reset its size.
see the _settings_coordinate method and MIN_FRAME_SIZE.
'opts_win_size' (tuple): Options window size (width x height).
'selected_video_formats' (list): List that contains the selected
video formats to display on the main window
'selected_audio_formats' (list): List that contains the selected
audio formats to display on the main window
'selected_format' (string): Current format selected on the main window
'youtube_dl_debug' (boolean): When True will pass the '-v' flag to youtube-dl.
'ignore_config' (boolean): When True will ignore youtube-dl config file options.
'confirm_exit' (boolean): When True create a message to confirm exit of youtube-dlg.
'native_hls' (boolean): When True youtube-dl will use the native HLS implementation.
'show_completion_popup' (boolean): When True youtube-dlg will create a message to inform
the user of the download completion.
'confirm_deletion' (boolean): When True ask user before item removal.
'nomtime' (boolean): When True will not use the last-modified header to
set the file modification time.
'embed_thumbnail' (boolean): When True will embed the thumbnail in
the audio file as cover art.
'add_metadata' (boolean): When True will write metadata to file.
"""
logging.debug('load_options default___________________')
self.options = {'save_path': os.path.expanduser('~'),
'save_path_dirs': [os.path.expanduser('~'), os.path.join(os.
path.expanduser('~'), 'Downloads'), os.path.join(os.path.
expanduser('~'), 'Desktop'), os.path.join(os.path.expanduser(
'~'), 'Videos'), os.path.join(os.path.expanduser('~'), 'Music')
], 'video_format': '0', 'second_video_format': '0', 'to_audio':
False, 'keep_video': False, 'audio_format': '', 'audio_quality':
'5', 'restrict_filenames': False, 'output_format': 1,
'output_template': os.path.join('%(uploader)s',
'%(title)s.%(ext)s'), 'playlist_start': 1, 'playlist_end': 0,
'max_downloads': 0, 'min_filesize': 0, 'max_filesize': 0,
'min_filesize_unit': '', 'max_filesize_unit': '', 'write_subs':
True, 'write_all_subs': False, 'write_auto_subs': False,
'embed_subs': False, 'subs_lang': 'en', 'ignore_errors': True,
'open_dl_dir': False, 'write_description': False, 'write_info':
False, 'write_thumbnail': False, 'retries': 10, 'user_agent':
'', 'referer': '', 'proxy': '', 'shutdown': False,
'sudo_password': '', 'username': '', 'password': '',
'video_password': '', 'youtubedl_path': self.config_path,
'cmd_args': '', 'enable_log': True, 'log_time': True,
'workers_number': 3, 'locale_name': 'en_US', 'main_win_size': (
740, 490), 'opts_win_size': (640, 490),
'selected_video_formats': ['default', 'mp4', 'webm'],
'selected_audio_formats': ['mp3', 'm4a', 'vorbis'],
'selected_format': '0', 'youtube_dl_debug': False,
'ignore_config': True, 'confirm_exit': True, 'native_hls': True,
'show_completion_popup': True, 'confirm_deletion': True,
'nomtime': False, 'embed_thumbnail': False, 'add_metadata': False}
def load_from_file(self):
"""
Load options from settings file.
"""
if not os.path.exists(self.settings_file):
return
with open(self.settings_file, 'rb') as settings_file:
try:
options = json.load(settings_file)
if self._settings_coordinate(options):
self.options = options
except:
self.load_default()
def save_to_file(self):
"""Save options to settings file.
"""
check_path(self.config_path)
with open(self.settings_file, 'w') as settings_file:
options = self._get_options()
json.dump(options, settings_file, indent=4, separators=(',', ': '))
def _settings_coordinate(self, settings_dict):
"""
Check settings.json dictionary
Args:
settings_dict: Options dict loaded. See load_from_file() method.
Return:
True if settings.json is valid / else False
"""
VALID_VIDEO_FORMAT = ('0', '17', '36', '5', '34', '35', '43', '44',
'45', '46', '18', '22', '37', '38', '160', '133', '134', '135',
'136', '137', '264', '138', '242', '243', '244', '247', '248',
'271', '272', '82', '83', '84', '85', '100', '101', '102',
'139', '140', '141', '171', '172')
VALID_AUDIO_FORMAT = 'mp3', 'wav', 'aac', 'm4a', 'vorbis', 'opus', ''
VALID_AUDIO_QUALITY = '0', '5', '9'
VALID_FILESIZE_UNIT = '', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y'
VALID_SUB_LANGUAGE = ('en', 'el', 'pt', 'fr', 'it', 'ru', 'es',
'de', 'he', 'sv', 'tr')
MIN_FRAME_SIZE = 100
for key in self.options:
if key not in settings_dict:
return False
if type(self.options[key]) != type(settings_dict[key]):
return False
rules_dict = {'video_format': FORMATS.keys(), 'second_video_format':
VALID_VIDEO_FORMAT, 'audio_format': VALID_AUDIO_FORMAT,
'audio_quality': VALID_AUDIO_QUALITY, 'output_format':
OUTPUT_FORMATS.keys(), 'min_filesize_unit': VALID_FILESIZE_UNIT,
'max_filesize_unit': VALID_FILESIZE_UNIT, 'subs_lang':
VALID_SUB_LANGUAGE}
for key, valid_list in rules_dict.items():
if settings_dict[key] not in valid_list:
return False
if settings_dict['workers_number'] < 1:
return False
return True
def _get_options(self):
"""
Return options dictionary.
"""
tmp_options = self.options.copy()
for key in self.SENSITIVE_KEYS:
tmp_options[key] = ''
return tmp_options
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class OptionsManager(object):
"""
This class is responsible for storing & retrieving the options.
Args:
config_path (string): Absolute path where OptionsManager
should store the settings file.
Note:
See load_default() method for available options.
Example:
Access the options using the 'options' variable.
opt_manager = OptionsManager('.')
opt_manager.options['save_path'] = '~/Downloads'
"""
SETTINGS_FILENAME = 'settings.json'
SENSITIVE_KEYS = 'sudo_password', 'password', 'video_password'
format = '%(asctime)s: %(message)s'
logging.basicConfig(format=format, level=logging.INFO, datefmt='%H:%M:%S')
logging.getLogger().setLevel(logging.DEBUG)
def __init__(self, config_path):
self.config_path = config_path
self.settings_file = os.path.join(config_path, self.SETTINGS_FILENAME)
self.options = dict()
self.load_default()
self.load_from_file()
def load_default(self):
"""Load the default options.
Note:
This method is automatically called by the constructor.
Options Description:
'save_path' (string): Path where youtube-dl should store
the downloaded file. Default is $HOME (~\\Downloads)
'save_path_dirs' (list): List that contains temporary save paths.
'video_format' (string): Video format to download.
When this option is '0' youtube-dl will choose
the best video format for the given URL.
'second_video_format' (string): Video format to mix with the
first one (-f 18+17)
'to_audio' (boolean): If True youtube-dl will post process the
video file.
'keep_video' (boolean): If True youtube-dl will keep the video
after post processing it.
'audio_format' (string): Audio format of the post processed file.
values are: mp3, wav, aac, m4a, vorbis, opus.
'audio_quality' (string): Audio quality of the post processed file.
values are: 9, 5, 0. The lowest value the better the quality.
'restrict_filenames' (boolean): If True youtube-dl will restrict
the downloaded file filename to ASCII characters only.
'output_format' (int): This option sets the downloaded file
output template. See formats.OUTPUT_FORMATS for more info.
'output_template' (string): Can be any output template supported
by youtube-dl
'playlist_start' (int): Playlist index to start downloading.
'playlist_end' (int): Playlist index to stop downloading.
'max_downloads' (int): Maximum number of video files to download
from the given playlist.
'min_filesize' (float): Min file size of the video file.
if the video is smaller than the given size then
youtube-dl will abort the download process.
'max_filesize' (float): Max file size of the video file.
if the video is larger than the given size then
youtube-dl will abort the download process.
'min_filesize_unit' (string): Minimum file size unit.
values are: '', k, m, g, t, p, e, z, y.
'max_filesize_unit' (string): Maximum file size unit.
values are: '', k, m, g, t, p, e, z, y.
'write_subs' (boolean): If True youtube-dl will try to download
the subtitles file for the given URL.
'write_all_subs' (boolean): If True youtube-dl will try to download
all the available subtitles for the given URL.
'write_auto_subs' (boolean): If True youtube-dl will try to download
the automatic subtitles file for the given URL.
'embed_subs' (boolean): If True youtube-dl will try to merge the
subtitles file with the video. (ONLY mp4 files)
'subs_lang' (string): Language of the subtitles file to download.
Needs 'write_subs' option.
'ignore_errors' (boolean): If True youtube-dl will ignore errors
and continue the download process.
'open_dl_dir' (boolean): If True youtube-dl will open the destination
folder after the download process has been completed.
'write_description' (boolean): If True youtube-dl will write the video
description to a *.description file.
'write_info' (boolean): If True youtube-dl will write
video metadata to a *.info.json file.
'write_thumbnail' (boolean): If True youtube-dl will write a
thumbnail image to disk.
'retries' (int): Number of youtube-dl retries.
'user_agent' (string): Specify a custom user agent for youtube-dl.
'referer' (string): Specify a custom referer to use if the video
access is restricted to one domain.
'proxy' (string): Use the specified HTTP/HTTPS proxy.
'shutdown' (boolean): Shutdown PC after download process completed.
'sudo_password' (string): SUDO password for the shutdown process
if the user does not have elevated privileges.
'username' (string): Username to login with.
'password' (string): Password to login with.
'video_password' (string): Video Password for the given URL.
'youtubedl_path' (string): Absolute path to the youtube-dl binary.
Default is self.config_path. You can change this path
to point anywhere if you want to use the youtube-dl binary on your system.
This is also the directory where youtube-dlg will auto-download
youtube-dl if it does not exist, so you should make sure you have write access
if you want to update the youtube-dl binary from within youtube-dlg.
'cmd_args' (string): String that contains extra youtube-dl options
separated by spaces.
'enable_log' (boolean): If True youtube-dlg will enable
the LogManager, see main() function under __init__().
'log_time' (boolean): See logmanager.LogManager add_time attribute.
'workers_number' (int): Number of download workers that download manager
will spawn. Must be greater than zero.
'locale_name' (string): Locale name (en_US)
'main_win_size' (tuple): Main window size (width x height).
if the window becomes too small the program will reset its size.
see the _settings_coordinate method and MIN_FRAME_SIZE.
'opts_win_size' (tuple): Options window size (width x height).
'selected_video_formats' (list): List that contains the selected
video formats to display on the main window
'selected_audio_formats' (list): List that contains the selected
audio formats to display on the main window
'selected_format' (string): Current format selected on the main window
'youtube_dl_debug' (boolean): When True will pass the '-v' flag to youtube-dl.
'ignore_config' (boolean): When True will ignore youtube-dl config file options.
'confirm_exit' (boolean): When True create a message to confirm exit of youtube-dlg.
'native_hls' (boolean): When True youtube-dl will use the native HLS implementation.
'show_completion_popup' (boolean): When True youtube-dlg will create a message to inform
the user of the download completion.
'confirm_deletion' (boolean): When True ask user before item removal.
'nomtime' (boolean): When True will not use the last-modified header to
set the file modification time.
'embed_thumbnail' (boolean): When True will embed the thumbnail in
the audio file as cover art.
'add_metadata' (boolean): When True will write metadata to file.
"""
logging.debug('load_options default___________________')
self.options = {'save_path': os.path.expanduser('~'),
'save_path_dirs': [os.path.expanduser('~'), os.path.join(os.
path.expanduser('~'), 'Downloads'), os.path.join(os.path.
expanduser('~'), 'Desktop'), os.path.join(os.path.expanduser(
'~'), 'Videos'), os.path.join(os.path.expanduser('~'), 'Music')
], 'video_format': '0', 'second_video_format': '0', 'to_audio':
False, 'keep_video': False, 'audio_format': '', 'audio_quality':
'5', 'restrict_filenames': False, 'output_format': 1,
'output_template': os.path.join('%(uploader)s',
'%(title)s.%(ext)s'), 'playlist_start': 1, 'playlist_end': 0,
'max_downloads': 0, 'min_filesize': 0, 'max_filesize': 0,
'min_filesize_unit': '', 'max_filesize_unit': '', 'write_subs':
True, 'write_all_subs': False, 'write_auto_subs': False,
'embed_subs': False, 'subs_lang': 'en', 'ignore_errors': True,
'open_dl_dir': False, 'write_description': False, 'write_info':
False, 'write_thumbnail': False, 'retries': 10, 'user_agent':
'', 'referer': '', 'proxy': '', 'shutdown': False,
'sudo_password': '', 'username': '', 'password': '',
'video_password': '', 'youtubedl_path': self.config_path,
'cmd_args': '', 'enable_log': True, 'log_time': True,
'workers_number': 3, 'locale_name': 'en_US', 'main_win_size': (
740, 490), 'opts_win_size': (640, 490),
'selected_video_formats': ['default', 'mp4', 'webm'],
'selected_audio_formats': ['mp3', 'm4a', 'vorbis'],
'selected_format': '0', 'youtube_dl_debug': False,
'ignore_config': True, 'confirm_exit': True, 'native_hls': True,
'show_completion_popup': True, 'confirm_deletion': True,
'nomtime': False, 'embed_thumbnail': False, 'add_metadata': False}
def load_from_file(self):
"""
Load options from settings file.
"""
if not os.path.exists(self.settings_file):
return
with open(self.settings_file, 'rb') as settings_file:
try:
options = json.load(settings_file)
if self._settings_coordinate(options):
self.options = options
except:
self.load_default()
def save_to_file(self):
"""Save options to settings file.
"""
check_path(self.config_path)
with open(self.settings_file, 'w') as settings_file:
options = self._get_options()
json.dump(options, settings_file, indent=4, separators=(',', ': '))
def _settings_coordinate(self, settings_dict):
"""
Check settings.json dictionary
Args:
settings_dict: Options dict loaded. See load_from_file() method.
Return:
True if settings.json is valid / else False
"""
VALID_VIDEO_FORMAT = ('0', '17', '36', '5', '34', '35', '43', '44',
'45', '46', '18', '22', '37', '38', '160', '133', '134', '135',
'136', '137', '264', '138', '242', '243', '244', '247', '248',
'271', '272', '82', '83', '84', '85', '100', '101', '102',
'139', '140', '141', '171', '172')
VALID_AUDIO_FORMAT = 'mp3', 'wav', 'aac', 'm4a', 'vorbis', 'opus', ''
VALID_AUDIO_QUALITY = '0', '5', '9'
VALID_FILESIZE_UNIT = '', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y'
VALID_SUB_LANGUAGE = ('en', 'el', 'pt', 'fr', 'it', 'ru', 'es',
'de', 'he', 'sv', 'tr')
MIN_FRAME_SIZE = 100
for key in self.options:
if key not in settings_dict:
return False
if type(self.options[key]) != type(settings_dict[key]):
return False
rules_dict = {'video_format': FORMATS.keys(), 'second_video_format':
VALID_VIDEO_FORMAT, 'audio_format': VALID_AUDIO_FORMAT,
'audio_quality': VALID_AUDIO_QUALITY, 'output_format':
OUTPUT_FORMATS.keys(), 'min_filesize_unit': VALID_FILESIZE_UNIT,
'max_filesize_unit': VALID_FILESIZE_UNIT, 'subs_lang':
VALID_SUB_LANGUAGE}
for key, valid_list in rules_dict.items():
if settings_dict[key] not in valid_list:
return False
if settings_dict['workers_number'] < 1:
return False
return True
def _get_options(self):
"""
Return options dictionary.
"""
tmp_options = self.options.copy()
for key in self.SENSITIVE_KEYS:
tmp_options[key] = ''
return tmp_options
<|reserved_special_token_1|>
# This Python file uses the following encoding: utf-8
import json
import os
import logging
from .utility_helper import (
check_path,
)
from .formats import (
OUTPUT_FORMATS,
FORMATS
)
class OptionsManager(object):
"""
This class is responsible for storing & retrieving the options.
Args:
config_path (string): Absolute path where OptionsManager
should store the settings file.
Note:
See load_default() method for available options.
Example:
Access the options using the 'options' variable.
opt_manager = OptionsManager('.')
opt_manager.options['save_path'] = '~/Downloads'
"""
SETTINGS_FILENAME = 'settings.json'
SENSITIVE_KEYS = ('sudo_password', 'password', 'video_password')
format = "%(asctime)s: %(message)s"
logging.basicConfig(format=format, level=logging.INFO, datefmt='%H:%M:%S')
logging.getLogger().setLevel(logging.DEBUG)
def __init__(self, config_path):
self.config_path = config_path
self.settings_file = os.path.join(config_path, self.SETTINGS_FILENAME)
self.options = dict()
self.load_default()
self.load_from_file()
def load_default(self):
"""Load the default options.
Note:
This method is automatically called by the constructor.
Options Description:
'save_path' (string): Path where youtube-dl should store
the downloaded file. Default is $HOME (~\Downloads)
'save_path_dirs' (list): List that contains temporary save paths.
'video_format' (string): Video format to download.
When this option is '0' youtube-dl will choose
the best video format for the given URL.
'second_video_format' (string): Video format to mix with the
first one (-f 18+17)
'to_audio' (boolean): If True youtube-dl will post process the
video file.
'keep_video' (boolean): If True youtube-dl will keep the video
after post processing it.
'audio_format' (string): Audio format of the post processed file.
values are: mp3, wav, aac, m4a, vorbis, opus.
'audio_quality' (string): Audio quality of the post processed file.
values are: 9, 5, 0. The lowest value the better the quality.
'restrict_filenames' (boolean): If True youtube-dl will restrict
the downloaded file filename to ASCII characters only.
'output_format' (int): This option sets the downloaded file
output template. See formats.OUTPUT_FORMATS for more info.
'output_template' (string): Can be any output template supported
by youtube-dl
'playlist_start' (int): Playlist index to start downloading.
'playlist_end' (int): Playlist index to stop downloading.
'max_downloads' (int): Maximum number of video files to download
from the given playlist.
'min_filesize' (float): Min file size of the video file.
if the video is smaller than the given size then
youtube-dl will abort the download process.
'max_filesize' (float): Max file size of the video file.
if the video is larger than the given size then
youtube-dl will abort the download process.
'min_filesize_unit' (string): Minimum file size unit.
values are: '', k, m, g, t, p, e, z, y.
'max_filesize_unit' (string): Maximum file size unit.
values are: '', k, m, g, t, p, e, z, y.
'write_subs' (boolean): If True youtube-dl will try to download
the subtitles file for the given URL.
'write_all_subs' (boolean): If True youtube-dl will try to download
all the available subtitles for the given URL.
'write_auto_subs' (boolean): If True youtube-dl will try to download
the automatic subtitles file for the given URL.
'embed_subs' (boolean): If True youtube-dl will try to merge the
subtitles file with the video. (ONLY mp4 files)
'subs_lang' (string): Language of the subtitles file to download.
Needs 'write_subs' option.
'ignore_errors' (boolean): If True youtube-dl will ignore errors
and continue the download process.
'open_dl_dir' (boolean): If True youtube-dl will open the destination
folder after the download process has been completed.
'write_description' (boolean): If True youtube-dl will write the video
description to a *.description file.
'write_info' (boolean): If True youtube-dl will write
video metadata to a *.info.json file.
'write_thumbnail' (boolean): If True youtube-dl will write a
thumbnail image to disk.
'retries' (int): Number of youtube-dl retries.
'user_agent' (string): Specify a custom user agent for youtube-dl.
'referer' (string): Specify a custom referer to use if the video
access is restricted to one domain.
'proxy' (string): Use the specified HTTP/HTTPS proxy.
'shutdown' (boolean): Shutdown PC after download process completed.
'sudo_password' (string): SUDO password for the shutdown process
if the user does not have elevated privileges.
'username' (string): Username to login with.
'password' (string): Password to login with.
'video_password' (string): Video Password for the given URL.
'youtubedl_path' (string): Absolute path to the youtube-dl binary.
Default is self.config_path. You can change this path
to point anywhere if you want to use the youtube-dl binary on your system.
This is also the directory where youtube-dlg will auto-download
youtube-dl if it does not exist, so you should make sure you have write access
if you want to update the youtube-dl binary from within youtube-dlg.
'cmd_args' (string): String that contains extra youtube-dl options
separated by spaces.
'enable_log' (boolean): If True youtube-dlg will enable
the LogManager, see main() function under __init__().
'log_time' (boolean): See logmanager.LogManager add_time attribute.
'workers_number' (int): Number of download workers that download manager
will spawn. Must be greater than zero.
'locale_name' (string): Locale name (en_US)
'main_win_size' (tuple): Main window size (width x height).
if the window becomes too small the program will reset its size.
see the _settings_coordinate method and MIN_FRAME_SIZE.
'opts_win_size' (tuple): Options window size (width x height).
'selected_video_formats' (list): List that contains the selected
video formats to display on the main window
'selected_audio_formats' (list): List that contains the selected
audio formats to display on the main window
'selected_format' (string): Current format selected on the main window
'youtube_dl_debug' (boolean): When True will pass the '-v' flag to youtube-dl.
'ignore_config' (boolean): When True will ignore youtube-dl config file options.
'confirm_exit' (boolean): When True create a message to confirm exit of youtube-dlg.
'native_hls' (boolean): When True youtube-dl will use the native HLS implementation.
'show_completion_popup' (boolean): When True youtube-dlg will create a message to inform
the user of the download completion.
'confirm_deletion' (boolean): When True ask user before item removal.
'nomtime' (boolean): When True will not use the last-modified header to
set the file modification time.
'embed_thumbnail' (boolean): When True will embed the thumbnail in
the audio file as cover art.
'add_metadata' (boolean): When True will write metadata to file.
"""
#+++++<DEBUG_LOG>
logging.debug("load_options default___________________")
#-----<DEBUG_LOG>
self.options = {
'save_path' : os.path.expanduser('~'),
'save_path_dirs': [
os.path.expanduser('~'),
os.path.join(os.path.expanduser('~'), "Downloads"),
os.path.join(os.path.expanduser('~'), "Desktop"),
os.path.join(os.path.expanduser('~'), "Videos"),
os.path.join(os.path.expanduser('~'), "Music"),
],
'video_format': '0',
'second_video_format': '0',
'to_audio': False,
'keep_video': False,
'audio_format': '',
'audio_quality': '5',
'restrict_filenames': False,
'output_format': 1,
'output_template': os.path.join('%(uploader)s', '%(title)s.%(ext)s'),
'playlist_start': 1,
'playlist_end': 0,
'max_downloads': 0,
'min_filesize': 0,
'max_filesize': 0,
'min_filesize_unit': '',
'max_filesize_unit': '',
'write_subs': True,
'write_all_subs': False,
'write_auto_subs': False,
'embed_subs': False,
'subs_lang': 'en',
'ignore_errors': True,
'open_dl_dir': False,
'write_description': False,
'write_info': False,
'write_thumbnail': False,
'retries': 10,
'user_agent': '',
'referer': '',
'proxy': '',
'shutdown': False,
'sudo_password': '',
'username': '',
'password': '',
'video_password': '',
'youtubedl_path': self.config_path,
'cmd_args': '',
'enable_log': True,
'log_time': True,
'workers_number': 3,
'locale_name': 'en_US',
'main_win_size': (740, 490),
'opts_win_size': (640, 490),
'selected_video_formats': ['default', 'mp4', 'webm'],
'selected_audio_formats': ['mp3', 'm4a', 'vorbis'],
'selected_format': '0',
'youtube_dl_debug': False,
'ignore_config': True,
'confirm_exit': True,
'native_hls': True,
'show_completion_popup': True,
'confirm_deletion': True,
'nomtime': False,
'embed_thumbnail': False,
'add_metadata': False
}
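# A minimal sketch (an editorial assumption, not original code) of overriding
# one of the defaults above before persisting it; keys and value types must
# keep matching these defaults, or load_from_file() will reject the file and
# fall back to load_default() on the next start:
#
#     opt_manager = OptionsManager(config_dir)          # config_dir is hypothetical
#     opt_manager.options['audio_format'] = 'mp3'       # valid per VALID_AUDIO_FORMAT
#     opt_manager.save_to_file()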
def load_from_file(self):
"""
Load options from settings file.
"""
if not os.path.exists(self.settings_file):
return
with open(self.settings_file, 'rb') as settings_file:
try:
options = json.load(settings_file)
if self._settings_coordinate(options):
self.options = options
except:
self.load_default()
def save_to_file(self):
"""Save options to settings file.
"""
check_path(self.config_path)
with open(self.settings_file, 'w') as settings_file:
options = self._get_options()
json.dump(options,
settings_file,
indent=4,
separators=(',', ': '))
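# The file written above is plain pretty-printed JSON; an excerpt might look
# like this (illustrative values only):
#
#     {
#         "save_path": "/home/user",
#         "workers_number": 3,
#         "password": ""
#     }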
def _settings_coordinate(self, settings_dict):
"""
Check settings.json dictionary
Args:
settings_dict: Options dict loaded. See load_from_file() method.
Return:
True if settings.json is valid / else False
"""
VALID_VIDEO_FORMAT = ('0', '17', '36', '5', '34', '35', '43', '44', '45',
'46', '18', '22', '37', '38', '160', '133', '134', '135', '136','137',
'264', '138', '242', '243', '244', '247', '248', '271', '272', '82',
'83', '84', '85', '100', '101', '102', '139', '140', '141', '171', '172')
VALID_AUDIO_FORMAT = ('mp3', 'wav', 'aac', 'm4a', 'vorbis', 'opus', '')
VALID_AUDIO_QUALITY = ('0', '5', '9')
VALID_FILESIZE_UNIT = ('', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y')
VALID_SUB_LANGUAGE = ('en', 'el', 'pt', 'fr', 'it', 'ru', 'es', 'de', 'he', 'sv', 'tr')
MIN_FRAME_SIZE = 100
for key in self.options:
if key not in settings_dict:
return False
if type(self.options[key]) != type(settings_dict[key]):
return False
# Check if each key has a valid value
rules_dict = {
'video_format': FORMATS.keys(),
'second_video_format': VALID_VIDEO_FORMAT,
'audio_format': VALID_AUDIO_FORMAT,
'audio_quality': VALID_AUDIO_QUALITY,
'output_format': OUTPUT_FORMATS.keys(),
'min_filesize_unit': VALID_FILESIZE_UNIT,
'max_filesize_unit': VALID_FILESIZE_UNIT,
'subs_lang': VALID_SUB_LANGUAGE
}
for key, valid_list in rules_dict.items():
if settings_dict[key] not in valid_list:
return False
if settings_dict['workers_number'] < 1:
return False
return True
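# For instance (illustrative values): a dict with 'audio_quality': '3' fails
# the rules_dict check above, 'workers_number': 0 fails the final check, and
# the dict produced by load_default() is expected to pass, assuming FORMATS
# and OUTPUT_FORMATS contain the default '0' and 1 keys.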
def _get_options(self):
"""
Return options dictionary.
"""
tmp_options = self.options.copy()
for key in self.SENSITIVE_KEYS:
tmp_options[key] = ''
return tmp_options
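# Illustrative round trip (a sketch, not part of the original module),
# mirroring the example in the class docstring:
#
#     opt_manager = OptionsManager('/path/to/config')   # loads defaults, then settings.json
#     opt_manager.options['save_path'] = '~/Downloads'
#     opt_manager.save_to_file()                        # sensitive keys are written blank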
|
flexible
|
{
"blob_id": "92529c4d4c33a7473773f081f730e64bae4d7f54",
"index": 5742,
"step-1": "<mask token>\n\n\nclass OptionsManager(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n logging.basicConfig(format=format, level=logging.INFO, datefmt='%H:%M:%S')\n logging.getLogger().setLevel(logging.DEBUG)\n <mask token>\n <mask token>\n <mask token>\n\n def save_to_file(self):\n \"\"\"Save options to settings file.\n \"\"\"\n check_path(self.config_path)\n with open(self.settings_file, 'w') as settings_file:\n options = self._get_options()\n json.dump(options, settings_file, indent=4, separators=(',', ': '))\n\n def _settings_coordinate(self, settings_dict):\n \"\"\"\n Check settings.json dictionary\n\n Args: \n settings_dict: Options dict loaded. See load_from_file() method.\n \n Return:\n True if settings.json is valid / else False\n \"\"\"\n VALID_VIDEO_FORMAT = ('0', '17', '36', '5', '34', '35', '43', '44',\n '45', '46', '18', '22', '37', '38', '160', '133', '134', '135',\n '136', '137', '264', '138', '242', '243', '244', '247', '248',\n '271', '272', '82', '83', '84', '85', '100', '101', '102',\n '139', '140', '141', '171', '172')\n VALID_AUDIO_FORMAT = 'mp3', 'wav', 'aac', 'm4a', 'vorbis', 'opus', ''\n VALID_AUDIO_QUALITY = '0', '5', '9'\n VALID_FILESIZE_UNIT = '', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y'\n VALID_SUB_LANGUAGE = ('en', 'el', 'pt', 'fr', 'it', 'ru', 'es',\n 'de', 'he', 'sv', 'tr')\n MIN_FRAME_SIZE = 100\n for key in self.options:\n if key not in settings_dict:\n return False\n if type(self.options[key]) != type(settings_dict[key]):\n return False\n rules_dict = {'video_format': FORMATS.keys(), 'second_video_format':\n VALID_VIDEO_FORMAT, 'audio_format': VALID_AUDIO_FORMAT,\n 'audio_quality': VALID_AUDIO_QUALITY, 'output_format':\n OUTPUT_FORMATS.keys(), 'min_filesize_unit': VALID_FILESIZE_UNIT,\n 'max_filesize_unit': VALID_FILESIZE_UNIT, 'subs_lang':\n VALID_SUB_LANGUAGE}\n for key, valid_list in rules_dict.items():\n if settings_dict[key] not in valid_list:\n return False\n if settings_dict['workers_number'] < 1:\n return False\n return True\n\n def _get_options(self):\n \"\"\"\n Return options dictionary.\n \"\"\"\n tmp_options = self.options.copy()\n for key in self.SENSITIVE_KEYS:\n tmp_options[key] = ''\n return tmp_options\n",
"step-2": "<mask token>\n\n\nclass OptionsManager(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n logging.basicConfig(format=format, level=logging.INFO, datefmt='%H:%M:%S')\n logging.getLogger().setLevel(logging.DEBUG)\n <mask token>\n\n def load_default(self):\n \"\"\"Load the default options.\n\n Note:\n This method is automatically called by the constructor.\n\n Options Description:\n 'save_path' (string): Path where youtube-dl shoult store \n the downloaded file. default is $HOME (~\\\\Downloads)\n\n 'save_path_dirs' (list): List that contains temporary save paths. \n\n 'video_format' (string): Video format to download.\n When this option is '0' youtube-dl will choose \n the best video format for given URL.\n\n 'second_video_format' (string): Video format to mix with the \n one (-f 18+17)\n\n 'to_audio' (boolean): If True youtube-dl will post process the\n video file.\n\n 'keep_video' (boolean): If True youtube-dl will keep the video\n after post processing it.\n\n 'audio_format' (string): Audio format of the post processed file.\n values are: mp3, wav, aac, m4a, vorbis, opus.\n\n 'audio_quality' (string): Audio quality of the post processed file.\n values are: 9, 5, 0. The lowest value the better the quality.\n\n 'restrict_filenames' (boolean): If True youtube-dl will restrict \n the downloaded file filename to ASCII characters only.\n\n 'output_format' (int): This options sets the downloaded file \n output template. See formats.OUTPUT_FORMATS for mor info.\n\n 'output_template' (string) : Can be any output template supported \n by youtube-dl\n\n 'playlist_start' (int): Playlist index to start downloading\n\n 'playlist_end' (int): Playlist index to stop downloading.\n\n 'max_downloads' (int): Maximun number of video files to download\n from the given playlist.\n\n 'min_filesize' (float): Min file size of the video file.\n if the video is smaller than the given size then\n youtube-dl will abort the download process.\n\n 'max_filesize' (float): Min file size of the video file.\n if the video is larger than the given size then\n youtube-dl will abort the download process.\n\n 'min_filesize_unit' (string): Minimum file size unit.\n values are: '', k, m, g, y, p, e, z, y.\n\n 'max_filesize_unit' (string): Maximum file size unit.\n values are: '', k, m, g, y, p, e, z, y.\n\n 'write_subs' (boolean): If True youtube-dl will try to download \n the subtitles file for the given URL.\n\n 'write_all_subs' (boolean): If True youtube-dl will try to download\n all the available subtitles for the given URL.\n\n 'write_auto_subs' (boolean): If True youtube-dl will try to download \n the automatic subtitlees file for the given URL.\n\n 'embed_subs' (boolean): If True youtube-dl will try to merge the \n subtitles file with the video. 
(ONLY mp4 files)\n\n 'subs_lang' (string): Language of the subtitles file to download.\n Needs 'write_subs' option.\n\n 'ignore_errors' (boolean): If True youtube-dl will ignore the errors\n and continue the download process.\n\n 'open_dl_dir' (boolean): If True youtube-dl will open the destination\n folder after download process has been completed.\n\n 'write_description' (boolean): If True youtube-dl will the video\n description to a *.description file.\n\n 'write_info' (boolean): If True youtube-dl will write \n video metadata to a *.info.json file.\n\n 'write_thumbnail' (boolean): If True youtube-dl will write a \n thumbnail image to disk.\n\n 'retries' (int): Number of youtube-dl retries.\n\n 'user_agent' (string): Specify a custom user agent for youtube-dl\n\n 'referer' (string): Specify a custom referer to user if the video\n access is restricted to one domain.\n\n 'proxy' (string): Use the specified HTTP/HTTPS proxy.\n\n 'shutdown' (boolean): Shutdown PC after download process completed.\n\n 'sudo_password' (string): SUDO password for the shutdown process \n if the user does not have elevated privileges.\n\n 'username' (string): Username to login with.\n\n 'password' (string): Password to login with.\n\n 'video_password' (string): Video Password for the given URL. \n\n 'youtubedl_path' (string): Absolute the path to the youtube-dl binary.\n Default is the self.config_path. You can change this position \n to point anywhere if you want to use the youtube-dl binary on your system.\n This is also the directory where youtube-dlg will auto download the \n youtube-dl if not exists so you should make sure you have write access \n if you want to update the youtube-dl binary from within youtube-dlg.\n\n 'cmd_args' (string): String that contains extra youtube-dl options \n seperated by spaces.\n\n 'enable_log' (boolean): If True youtube-dlg will enable \n the LogManager, see main() function under __init__().\n\n 'log_time' (boolean): See logmanager.LogManager add_time attribute.\n\n 'workers_number' (int): Number of download workers that download manager \n will spawn. 
Must be greater than zero.\n\n 'locale_name' (string): Locale name (en_US)\n\n 'main_win_size' (tuple): Main window size (width x height).\n if window becomes to small the program will reset its size.\n see _settings_are_valid method MIN_FRAME_SIZE.\n\n 'opts_win_size' (tuple): Main window size (width x height).\n\n 'selected_video_formats' (list): List that contains the selected\n video formats to display on the main window\n\n 'selected_audio_formats' (list): List that contains the selected\n audio formats to display on the main window\n\n 'selected_format' (string): Current format selected on the main window\n\n 'youtube_dl_debug' (boolean): When True will pass '-v' flag to youtube-dl\n config file options.\n\n 'ignore_config' (boolean): When True will ignore youtube-dl config file option.\n\n 'confirm_exit' (boolean): When True create message to confirm exist youtube-dlg\n\n 'native_hls' (boolean): When True youtube-dl will use the natives HLS implementation.\n\n 'show_completion_popup' (boolean): When True youtube-dl-dlg will create message to inform \n the user for the download completion\n\n 'confirm_deletion' (boolean): When True ask user before item removal.\n\n 'nomtime' (boolean): When True will not use the last-modified header to \n set the file modification time.\n\n 'embed_thumbnail' (boolean): When True will embed the thumbnail in\n the audio file as cover art.\n\n 'add_metadata' (boolean): When True will write metadata to file. \n \"\"\"\n logging.debug('load_options default___________________')\n self.options = {'save_path': os.path.expanduser('~'),\n 'save_path_dirs': [os.path.expanduser('~'), os.path.join(os.\n path.expanduser('~'), 'Downloads'), os.path.join(os.path.\n expanduser('~'), 'Desktop'), os.path.join(os.path.expanduser(\n '~'), 'Videos'), os.path.join(os.path.expanduser('~'), 'Music')\n ], 'video_format': '0', 'second_video_format': '0', 'to_audio':\n False, 'keep_video': False, 'audio_format': '', 'audio_quality':\n '5', 'restrict_filenames': False, 'output_format': 1,\n 'output_template': os.path.join('%(uploader)s',\n '%(title)s.%(ext)s'), 'playlist_start': 1, 'playlist_end': 0,\n 'max_downloads': 0, 'min_filesize': 0, 'max_filesize': 0,\n 'min_filesize_unit': '', 'max_filesize_unit': '', 'write_subs':\n True, 'write_all_subs': False, 'write_auto_subs': False,\n 'embed_subs': False, 'subs_lang': 'en', 'ignore_errors': True,\n 'open_dl_dir': False, 'write_description': False, 'write_info':\n False, 'write_thumbnail': False, 'retries': 10, 'user_agent':\n '', 'referer': '', 'proxy': '', 'shutdown': False,\n 'sudo_password': '', 'username': '', 'password': '',\n 'video_password': '', 'youtubedl_path': self.config_path,\n 'cmd_args': '', 'enable_log': True, 'log_time': True,\n 'workers_number': 3, 'locale_name': 'en_US', 'main_win_size': (\n 740, 490), 'opts_win_size': (640, 490),\n 'selected_video_formats': ['default', 'mp4', 'webm'],\n 'selected_audio_formats': ['mp3', 'm4a', 'vorbis'],\n 'selected_format': '0', 'youtube_dl_debug': False,\n 'ignore_config': True, 'confirm_exit': True, 'native_hls': True,\n 'show_completion_popup': True, 'confirm_deletion': True,\n 'nomtime': False, 'embed_thumbnail': False, 'add_metadata': False}\n\n def load_from_file(self):\n \"\"\"\n Load options from settings file.\n \"\"\"\n if not os.path.exists(self.settings_file):\n return\n with open(self.settings_file, 'rb') as settings_file:\n try:\n options = json.load(settings_file)\n if self._settings_coordinate(options):\n self.options = options\n except:\n 
self.load_default()\n\n def save_to_file(self):\n \"\"\"Save options to settings file.\n \"\"\"\n check_path(self.config_path)\n with open(self.settings_file, 'w') as settings_file:\n options = self._get_options()\n json.dump(options, settings_file, indent=4, separators=(',', ': '))\n\n def _settings_coordinate(self, settings_dict):\n \"\"\"\n Check settings.json dictionary\n\n Args: \n settings_dict: Options dict loaded. See load_from_file() method.\n \n Return:\n True if settings.json is valid / else False\n \"\"\"\n VALID_VIDEO_FORMAT = ('0', '17', '36', '5', '34', '35', '43', '44',\n '45', '46', '18', '22', '37', '38', '160', '133', '134', '135',\n '136', '137', '264', '138', '242', '243', '244', '247', '248',\n '271', '272', '82', '83', '84', '85', '100', '101', '102',\n '139', '140', '141', '171', '172')\n VALID_AUDIO_FORMAT = 'mp3', 'wav', 'aac', 'm4a', 'vorbis', 'opus', ''\n VALID_AUDIO_QUALITY = '0', '5', '9'\n VALID_FILESIZE_UNIT = '', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y'\n VALID_SUB_LANGUAGE = ('en', 'el', 'pt', 'fr', 'it', 'ru', 'es',\n 'de', 'he', 'sv', 'tr')\n MIN_FRAME_SIZE = 100\n for key in self.options:\n if key not in settings_dict:\n return False\n if type(self.options[key]) != type(settings_dict[key]):\n return False\n rules_dict = {'video_format': FORMATS.keys(), 'second_video_format':\n VALID_VIDEO_FORMAT, 'audio_format': VALID_AUDIO_FORMAT,\n 'audio_quality': VALID_AUDIO_QUALITY, 'output_format':\n OUTPUT_FORMATS.keys(), 'min_filesize_unit': VALID_FILESIZE_UNIT,\n 'max_filesize_unit': VALID_FILESIZE_UNIT, 'subs_lang':\n VALID_SUB_LANGUAGE}\n for key, valid_list in rules_dict.items():\n if settings_dict[key] not in valid_list:\n return False\n if settings_dict['workers_number'] < 1:\n return False\n return True\n\n def _get_options(self):\n \"\"\"\n Return options dictionary.\n \"\"\"\n tmp_options = self.options.copy()\n for key in self.SENSITIVE_KEYS:\n tmp_options[key] = ''\n return tmp_options\n",
"step-3": "<mask token>\n\n\nclass OptionsManager(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n logging.basicConfig(format=format, level=logging.INFO, datefmt='%H:%M:%S')\n logging.getLogger().setLevel(logging.DEBUG)\n\n def __init__(self, config_path):\n self.config_path = config_path\n self.settings_file = os.path.join(config_path, self.SETTINGS_FILENAME)\n self.options = dict()\n self.load_default()\n self.load_from_file()\n\n def load_default(self):\n \"\"\"Load the default options.\n\n Note:\n This method is automatically called by the constructor.\n\n Options Description:\n 'save_path' (string): Path where youtube-dl shoult store \n the downloaded file. default is $HOME (~\\\\Downloads)\n\n 'save_path_dirs' (list): List that contains temporary save paths. \n\n 'video_format' (string): Video format to download.\n When this option is '0' youtube-dl will choose \n the best video format for given URL.\n\n 'second_video_format' (string): Video format to mix with the \n one (-f 18+17)\n\n 'to_audio' (boolean): If True youtube-dl will post process the\n video file.\n\n 'keep_video' (boolean): If True youtube-dl will keep the video\n after post processing it.\n\n 'audio_format' (string): Audio format of the post processed file.\n values are: mp3, wav, aac, m4a, vorbis, opus.\n\n 'audio_quality' (string): Audio quality of the post processed file.\n values are: 9, 5, 0. The lowest value the better the quality.\n\n 'restrict_filenames' (boolean): If True youtube-dl will restrict \n the downloaded file filename to ASCII characters only.\n\n 'output_format' (int): This options sets the downloaded file \n output template. See formats.OUTPUT_FORMATS for mor info.\n\n 'output_template' (string) : Can be any output template supported \n by youtube-dl\n\n 'playlist_start' (int): Playlist index to start downloading\n\n 'playlist_end' (int): Playlist index to stop downloading.\n\n 'max_downloads' (int): Maximun number of video files to download\n from the given playlist.\n\n 'min_filesize' (float): Min file size of the video file.\n if the video is smaller than the given size then\n youtube-dl will abort the download process.\n\n 'max_filesize' (float): Min file size of the video file.\n if the video is larger than the given size then\n youtube-dl will abort the download process.\n\n 'min_filesize_unit' (string): Minimum file size unit.\n values are: '', k, m, g, y, p, e, z, y.\n\n 'max_filesize_unit' (string): Maximum file size unit.\n values are: '', k, m, g, y, p, e, z, y.\n\n 'write_subs' (boolean): If True youtube-dl will try to download \n the subtitles file for the given URL.\n\n 'write_all_subs' (boolean): If True youtube-dl will try to download\n all the available subtitles for the given URL.\n\n 'write_auto_subs' (boolean): If True youtube-dl will try to download \n the automatic subtitlees file for the given URL.\n\n 'embed_subs' (boolean): If True youtube-dl will try to merge the \n subtitles file with the video. 
(ONLY mp4 files)\n\n 'subs_lang' (string): Language of the subtitles file to download.\n Needs 'write_subs' option.\n\n 'ignore_errors' (boolean): If True youtube-dl will ignore the errors\n and continue the download process.\n\n 'open_dl_dir' (boolean): If True youtube-dl will open the destination\n folder after download process has been completed.\n\n 'write_description' (boolean): If True youtube-dl will the video\n description to a *.description file.\n\n 'write_info' (boolean): If True youtube-dl will write \n video metadata to a *.info.json file.\n\n 'write_thumbnail' (boolean): If True youtube-dl will write a \n thumbnail image to disk.\n\n 'retries' (int): Number of youtube-dl retries.\n\n 'user_agent' (string): Specify a custom user agent for youtube-dl\n\n 'referer' (string): Specify a custom referer to user if the video\n access is restricted to one domain.\n\n 'proxy' (string): Use the specified HTTP/HTTPS proxy.\n\n 'shutdown' (boolean): Shutdown PC after download process completed.\n\n 'sudo_password' (string): SUDO password for the shutdown process \n if the user does not have elevated privileges.\n\n 'username' (string): Username to login with.\n\n 'password' (string): Password to login with.\n\n 'video_password' (string): Video Password for the given URL. \n\n 'youtubedl_path' (string): Absolute the path to the youtube-dl binary.\n Default is the self.config_path. You can change this position \n to point anywhere if you want to use the youtube-dl binary on your system.\n This is also the directory where youtube-dlg will auto download the \n youtube-dl if not exists so you should make sure you have write access \n if you want to update the youtube-dl binary from within youtube-dlg.\n\n 'cmd_args' (string): String that contains extra youtube-dl options \n seperated by spaces.\n\n 'enable_log' (boolean): If True youtube-dlg will enable \n the LogManager, see main() function under __init__().\n\n 'log_time' (boolean): See logmanager.LogManager add_time attribute.\n\n 'workers_number' (int): Number of download workers that download manager \n will spawn. 
Must be greater than zero.\n\n 'locale_name' (string): Locale name (en_US)\n\n 'main_win_size' (tuple): Main window size (width x height).\n if window becomes to small the program will reset its size.\n see _settings_are_valid method MIN_FRAME_SIZE.\n\n 'opts_win_size' (tuple): Main window size (width x height).\n\n 'selected_video_formats' (list): List that contains the selected\n video formats to display on the main window\n\n 'selected_audio_formats' (list): List that contains the selected\n audio formats to display on the main window\n\n 'selected_format' (string): Current format selected on the main window\n\n 'youtube_dl_debug' (boolean): When True will pass '-v' flag to youtube-dl\n config file options.\n\n 'ignore_config' (boolean): When True will ignore youtube-dl config file option.\n\n 'confirm_exit' (boolean): When True create message to confirm exist youtube-dlg\n\n 'native_hls' (boolean): When True youtube-dl will use the natives HLS implementation.\n\n 'show_completion_popup' (boolean): When True youtube-dl-dlg will create message to inform \n the user for the download completion\n\n 'confirm_deletion' (boolean): When True ask user before item removal.\n\n 'nomtime' (boolean): When True will not use the last-modified header to \n set the file modification time.\n\n 'embed_thumbnail' (boolean): When True will embed the thumbnail in\n the audio file as cover art.\n\n 'add_metadata' (boolean): When True will write metadata to file. \n \"\"\"\n logging.debug('load_options default___________________')\n self.options = {'save_path': os.path.expanduser('~'),\n 'save_path_dirs': [os.path.expanduser('~'), os.path.join(os.\n path.expanduser('~'), 'Downloads'), os.path.join(os.path.\n expanduser('~'), 'Desktop'), os.path.join(os.path.expanduser(\n '~'), 'Videos'), os.path.join(os.path.expanduser('~'), 'Music')\n ], 'video_format': '0', 'second_video_format': '0', 'to_audio':\n False, 'keep_video': False, 'audio_format': '', 'audio_quality':\n '5', 'restrict_filenames': False, 'output_format': 1,\n 'output_template': os.path.join('%(uploader)s',\n '%(title)s.%(ext)s'), 'playlist_start': 1, 'playlist_end': 0,\n 'max_downloads': 0, 'min_filesize': 0, 'max_filesize': 0,\n 'min_filesize_unit': '', 'max_filesize_unit': '', 'write_subs':\n True, 'write_all_subs': False, 'write_auto_subs': False,\n 'embed_subs': False, 'subs_lang': 'en', 'ignore_errors': True,\n 'open_dl_dir': False, 'write_description': False, 'write_info':\n False, 'write_thumbnail': False, 'retries': 10, 'user_agent':\n '', 'referer': '', 'proxy': '', 'shutdown': False,\n 'sudo_password': '', 'username': '', 'password': '',\n 'video_password': '', 'youtubedl_path': self.config_path,\n 'cmd_args': '', 'enable_log': True, 'log_time': True,\n 'workers_number': 3, 'locale_name': 'en_US', 'main_win_size': (\n 740, 490), 'opts_win_size': (640, 490),\n 'selected_video_formats': ['default', 'mp4', 'webm'],\n 'selected_audio_formats': ['mp3', 'm4a', 'vorbis'],\n 'selected_format': '0', 'youtube_dl_debug': False,\n 'ignore_config': True, 'confirm_exit': True, 'native_hls': True,\n 'show_completion_popup': True, 'confirm_deletion': True,\n 'nomtime': False, 'embed_thumbnail': False, 'add_metadata': False}\n\n def load_from_file(self):\n \"\"\"\n Load options from settings file.\n \"\"\"\n if not os.path.exists(self.settings_file):\n return\n with open(self.settings_file, 'rb') as settings_file:\n try:\n options = json.load(settings_file)\n if self._settings_coordinate(options):\n self.options = options\n except:\n 
self.load_default()\n\n def save_to_file(self):\n \"\"\"Save options to settings file.\n \"\"\"\n check_path(self.config_path)\n with open(self.settings_file, 'w') as settings_file:\n options = self._get_options()\n json.dump(options, settings_file, indent=4, separators=(',', ': '))\n\n def _settings_coordinate(self, settings_dict):\n \"\"\"\n Check settings.json dictionary\n\n Args: \n settings_dict: Options dict loaded. See load_from_file() method.\n \n Return:\n True if settings.json is valid / else False\n \"\"\"\n VALID_VIDEO_FORMAT = ('0', '17', '36', '5', '34', '35', '43', '44',\n '45', '46', '18', '22', '37', '38', '160', '133', '134', '135',\n '136', '137', '264', '138', '242', '243', '244', '247', '248',\n '271', '272', '82', '83', '84', '85', '100', '101', '102',\n '139', '140', '141', '171', '172')\n VALID_AUDIO_FORMAT = 'mp3', 'wav', 'aac', 'm4a', 'vorbis', 'opus', ''\n VALID_AUDIO_QUALITY = '0', '5', '9'\n VALID_FILESIZE_UNIT = '', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y'\n VALID_SUB_LANGUAGE = ('en', 'el', 'pt', 'fr', 'it', 'ru', 'es',\n 'de', 'he', 'sv', 'tr')\n MIN_FRAME_SIZE = 100\n for key in self.options:\n if key not in settings_dict:\n return False\n if type(self.options[key]) != type(settings_dict[key]):\n return False\n rules_dict = {'video_format': FORMATS.keys(), 'second_video_format':\n VALID_VIDEO_FORMAT, 'audio_format': VALID_AUDIO_FORMAT,\n 'audio_quality': VALID_AUDIO_QUALITY, 'output_format':\n OUTPUT_FORMATS.keys(), 'min_filesize_unit': VALID_FILESIZE_UNIT,\n 'max_filesize_unit': VALID_FILESIZE_UNIT, 'subs_lang':\n VALID_SUB_LANGUAGE}\n for key, valid_list in rules_dict.items():\n if settings_dict[key] not in valid_list:\n return False\n if settings_dict['workers_number'] < 1:\n return False\n return True\n\n def _get_options(self):\n \"\"\"\n Return options dictionary.\n \"\"\"\n tmp_options = self.options.copy()\n for key in self.SENSITIVE_KEYS:\n tmp_options[key] = ''\n return tmp_options\n",
"step-4": "<mask token>\n\n\nclass OptionsManager(object):\n \"\"\"\n This clas is responsible for storing & retrieving the options.\n\n Args: \n config_path (string): Absolute path where OptionsManager\n should store the settings file.\n\n Note:\n See load_default() method for available options.\n\n Example:\n Access the options using the 'options' variable.\n\n opt_manager = OptionsManager('.')\n opt_manager.options['save_path'] = '~/Downloads'\n \"\"\"\n SETTINGS_FILENAME = 'settings.json'\n SENSITIVE_KEYS = 'sudo_password', 'password', 'video_password'\n format = '%(asctime)s: %(message)s'\n logging.basicConfig(format=format, level=logging.INFO, datefmt='%H:%M:%S')\n logging.getLogger().setLevel(logging.DEBUG)\n\n def __init__(self, config_path):\n self.config_path = config_path\n self.settings_file = os.path.join(config_path, self.SETTINGS_FILENAME)\n self.options = dict()\n self.load_default()\n self.load_from_file()\n\n def load_default(self):\n \"\"\"Load the default options.\n\n Note:\n This method is automatically called by the constructor.\n\n Options Description:\n 'save_path' (string): Path where youtube-dl shoult store \n the downloaded file. default is $HOME (~\\\\Downloads)\n\n 'save_path_dirs' (list): List that contains temporary save paths. \n\n 'video_format' (string): Video format to download.\n When this option is '0' youtube-dl will choose \n the best video format for given URL.\n\n 'second_video_format' (string): Video format to mix with the \n one (-f 18+17)\n\n 'to_audio' (boolean): If True youtube-dl will post process the\n video file.\n\n 'keep_video' (boolean): If True youtube-dl will keep the video\n after post processing it.\n\n 'audio_format' (string): Audio format of the post processed file.\n values are: mp3, wav, aac, m4a, vorbis, opus.\n\n 'audio_quality' (string): Audio quality of the post processed file.\n values are: 9, 5, 0. The lowest value the better the quality.\n\n 'restrict_filenames' (boolean): If True youtube-dl will restrict \n the downloaded file filename to ASCII characters only.\n\n 'output_format' (int): This options sets the downloaded file \n output template. See formats.OUTPUT_FORMATS for mor info.\n\n 'output_template' (string) : Can be any output template supported \n by youtube-dl\n\n 'playlist_start' (int): Playlist index to start downloading\n\n 'playlist_end' (int): Playlist index to stop downloading.\n\n 'max_downloads' (int): Maximun number of video files to download\n from the given playlist.\n\n 'min_filesize' (float): Min file size of the video file.\n if the video is smaller than the given size then\n youtube-dl will abort the download process.\n\n 'max_filesize' (float): Min file size of the video file.\n if the video is larger than the given size then\n youtube-dl will abort the download process.\n\n 'min_filesize_unit' (string): Minimum file size unit.\n values are: '', k, m, g, y, p, e, z, y.\n\n 'max_filesize_unit' (string): Maximum file size unit.\n values are: '', k, m, g, y, p, e, z, y.\n\n 'write_subs' (boolean): If True youtube-dl will try to download \n the subtitles file for the given URL.\n\n 'write_all_subs' (boolean): If True youtube-dl will try to download\n all the available subtitles for the given URL.\n\n 'write_auto_subs' (boolean): If True youtube-dl will try to download \n the automatic subtitlees file for the given URL.\n\n 'embed_subs' (boolean): If True youtube-dl will try to merge the \n subtitles file with the video. 
(ONLY mp4 files)\n\n 'subs_lang' (string): Language of the subtitles file to download.\n Needs 'write_subs' option.\n\n 'ignore_errors' (boolean): If True youtube-dl will ignore the errors\n and continue the download process.\n\n 'open_dl_dir' (boolean): If True youtube-dl will open the destination\n folder after download process has been completed.\n\n 'write_description' (boolean): If True youtube-dl will the video\n description to a *.description file.\n\n 'write_info' (boolean): If True youtube-dl will write \n video metadata to a *.info.json file.\n\n 'write_thumbnail' (boolean): If True youtube-dl will write a \n thumbnail image to disk.\n\n 'retries' (int): Number of youtube-dl retries.\n\n 'user_agent' (string): Specify a custom user agent for youtube-dl\n\n 'referer' (string): Specify a custom referer to user if the video\n access is restricted to one domain.\n\n 'proxy' (string): Use the specified HTTP/HTTPS proxy.\n\n 'shutdown' (boolean): Shutdown PC after download process completed.\n\n 'sudo_password' (string): SUDO password for the shutdown process \n if the user does not have elevated privileges.\n\n 'username' (string): Username to login with.\n\n 'password' (string): Password to login with.\n\n 'video_password' (string): Video Password for the given URL. \n\n 'youtubedl_path' (string): Absolute the path to the youtube-dl binary.\n Default is the self.config_path. You can change this position \n to point anywhere if you want to use the youtube-dl binary on your system.\n This is also the directory where youtube-dlg will auto download the \n youtube-dl if not exists so you should make sure you have write access \n if you want to update the youtube-dl binary from within youtube-dlg.\n\n 'cmd_args' (string): String that contains extra youtube-dl options \n seperated by spaces.\n\n 'enable_log' (boolean): If True youtube-dlg will enable \n the LogManager, see main() function under __init__().\n\n 'log_time' (boolean): See logmanager.LogManager add_time attribute.\n\n 'workers_number' (int): Number of download workers that download manager \n will spawn. 
Must be greater than zero.\n\n 'locale_name' (string): Locale name (en_US)\n\n 'main_win_size' (tuple): Main window size (width x height).\n if window becomes to small the program will reset its size.\n see _settings_are_valid method MIN_FRAME_SIZE.\n\n 'opts_win_size' (tuple): Main window size (width x height).\n\n 'selected_video_formats' (list): List that contains the selected\n video formats to display on the main window\n\n 'selected_audio_formats' (list): List that contains the selected\n audio formats to display on the main window\n\n 'selected_format' (string): Current format selected on the main window\n\n 'youtube_dl_debug' (boolean): When True will pass '-v' flag to youtube-dl\n config file options.\n\n 'ignore_config' (boolean): When True will ignore youtube-dl config file option.\n\n 'confirm_exit' (boolean): When True create message to confirm exist youtube-dlg\n\n 'native_hls' (boolean): When True youtube-dl will use the natives HLS implementation.\n\n 'show_completion_popup' (boolean): When True youtube-dl-dlg will create message to inform \n the user for the download completion\n\n 'confirm_deletion' (boolean): When True ask user before item removal.\n\n 'nomtime' (boolean): When True will not use the last-modified header to \n set the file modification time.\n\n 'embed_thumbnail' (boolean): When True will embed the thumbnail in\n the audio file as cover art.\n\n 'add_metadata' (boolean): When True will write metadata to file. \n \"\"\"\n logging.debug('load_options default___________________')\n self.options = {'save_path': os.path.expanduser('~'),\n 'save_path_dirs': [os.path.expanduser('~'), os.path.join(os.\n path.expanduser('~'), 'Downloads'), os.path.join(os.path.\n expanduser('~'), 'Desktop'), os.path.join(os.path.expanduser(\n '~'), 'Videos'), os.path.join(os.path.expanduser('~'), 'Music')\n ], 'video_format': '0', 'second_video_format': '0', 'to_audio':\n False, 'keep_video': False, 'audio_format': '', 'audio_quality':\n '5', 'restrict_filenames': False, 'output_format': 1,\n 'output_template': os.path.join('%(uploader)s',\n '%(title)s.%(ext)s'), 'playlist_start': 1, 'playlist_end': 0,\n 'max_downloads': 0, 'min_filesize': 0, 'max_filesize': 0,\n 'min_filesize_unit': '', 'max_filesize_unit': '', 'write_subs':\n True, 'write_all_subs': False, 'write_auto_subs': False,\n 'embed_subs': False, 'subs_lang': 'en', 'ignore_errors': True,\n 'open_dl_dir': False, 'write_description': False, 'write_info':\n False, 'write_thumbnail': False, 'retries': 10, 'user_agent':\n '', 'referer': '', 'proxy': '', 'shutdown': False,\n 'sudo_password': '', 'username': '', 'password': '',\n 'video_password': '', 'youtubedl_path': self.config_path,\n 'cmd_args': '', 'enable_log': True, 'log_time': True,\n 'workers_number': 3, 'locale_name': 'en_US', 'main_win_size': (\n 740, 490), 'opts_win_size': (640, 490),\n 'selected_video_formats': ['default', 'mp4', 'webm'],\n 'selected_audio_formats': ['mp3', 'm4a', 'vorbis'],\n 'selected_format': '0', 'youtube_dl_debug': False,\n 'ignore_config': True, 'confirm_exit': True, 'native_hls': True,\n 'show_completion_popup': True, 'confirm_deletion': True,\n 'nomtime': False, 'embed_thumbnail': False, 'add_metadata': False}\n\n def load_from_file(self):\n \"\"\"\n Load options from settings file.\n \"\"\"\n if not os.path.exists(self.settings_file):\n return\n with open(self.settings_file, 'rb') as settings_file:\n try:\n options = json.load(settings_file)\n if self._settings_coordinate(options):\n self.options = options\n except:\n 
self.load_default()\n\n def save_to_file(self):\n \"\"\"Save options to settings file.\n \"\"\"\n check_path(self.config_path)\n with open(self.settings_file, 'w') as settings_file:\n options = self._get_options()\n json.dump(options, settings_file, indent=4, separators=(',', ': '))\n\n def _settings_coordinate(self, settings_dict):\n \"\"\"\n Check settings.json dictionary\n\n Args: \n settings_dict: Options dict loaded. See load_from_file() method.\n \n Return:\n True if settings.json is valid / else False\n \"\"\"\n VALID_VIDEO_FORMAT = ('0', '17', '36', '5', '34', '35', '43', '44',\n '45', '46', '18', '22', '37', '38', '160', '133', '134', '135',\n '136', '137', '264', '138', '242', '243', '244', '247', '248',\n '271', '272', '82', '83', '84', '85', '100', '101', '102',\n '139', '140', '141', '171', '172')\n VALID_AUDIO_FORMAT = 'mp3', 'wav', 'aac', 'm4a', 'vorbis', 'opus', ''\n VALID_AUDIO_QUALITY = '0', '5', '9'\n VALID_FILESIZE_UNIT = '', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y'\n VALID_SUB_LANGUAGE = ('en', 'el', 'pt', 'fr', 'it', 'ru', 'es',\n 'de', 'he', 'sv', 'tr')\n MIN_FRAME_SIZE = 100\n for key in self.options:\n if key not in settings_dict:\n return False\n if type(self.options[key]) != type(settings_dict[key]):\n return False\n rules_dict = {'video_format': FORMATS.keys(), 'second_video_format':\n VALID_VIDEO_FORMAT, 'audio_format': VALID_AUDIO_FORMAT,\n 'audio_quality': VALID_AUDIO_QUALITY, 'output_format':\n OUTPUT_FORMATS.keys(), 'min_filesize_unit': VALID_FILESIZE_UNIT,\n 'max_filesize_unit': VALID_FILESIZE_UNIT, 'subs_lang':\n VALID_SUB_LANGUAGE}\n for key, valid_list in rules_dict.items():\n if settings_dict[key] not in valid_list:\n return False\n if settings_dict['workers_number'] < 1:\n return False\n return True\n\n def _get_options(self):\n \"\"\"\n Return options dictionary.\n \"\"\"\n tmp_options = self.options.copy()\n for key in self.SENSITIVE_KEYS:\n tmp_options[key] = ''\n return tmp_options\n",
"step-5": "# This Python file uses the following encoding: utf-8\n\nimport json \nimport os\nimport logging\n\nfrom .utility_helper import (\n check_path,\n)\n\nfrom .formats import (\n OUTPUT_FORMATS,\n FORMATS\n)\n\nclass OptionsManager(object):\n \"\"\"\n This clas is responsible for storing & retrieving the options.\n\n Args: \n config_path (string): Absolute path where OptionsManager\n should store the settings file.\n\n Note:\n See load_default() method for available options.\n\n Example:\n Access the options using the 'options' variable.\n\n opt_manager = OptionsManager('.')\n opt_manager.options['save_path'] = '~/Downloads'\n \"\"\"\n SETTINGS_FILENAME = 'settings.json'\n SENSITIVE_KEYS = ('sudo_password', 'password', 'video_password') \n\n format = \"%(asctime)s: %(message)s\"\n logging.basicConfig(format=format, level=logging.INFO, datefmt='%H:%M:%S')\n logging.getLogger().setLevel(logging.DEBUG)\n\n\n def __init__(self, config_path):\n self.config_path = config_path\n self.settings_file = os.path.join(config_path, self.SETTINGS_FILENAME)\n self.options = dict()\n self.load_default()\n self.load_from_file()\n\n def load_default(self):\n \"\"\"Load the default options.\n\n Note:\n This method is automatically called by the constructor.\n\n Options Description:\n 'save_path' (string): Path where youtube-dl shoult store \n the downloaded file. default is $HOME (~\\Downloads)\n\n 'save_path_dirs' (list): List that contains temporary save paths. \n\n 'video_format' (string): Video format to download.\n When this option is '0' youtube-dl will choose \n the best video format for given URL.\n\n 'second_video_format' (string): Video format to mix with the \n one (-f 18+17)\n\n 'to_audio' (boolean): If True youtube-dl will post process the\n video file.\n\n 'keep_video' (boolean): If True youtube-dl will keep the video\n after post processing it.\n\n 'audio_format' (string): Audio format of the post processed file.\n values are: mp3, wav, aac, m4a, vorbis, opus.\n\n 'audio_quality' (string): Audio quality of the post processed file.\n values are: 9, 5, 0. The lowest value the better the quality.\n\n 'restrict_filenames' (boolean): If True youtube-dl will restrict \n the downloaded file filename to ASCII characters only.\n\n 'output_format' (int): This options sets the downloaded file \n output template. 
See formats.OUTPUT_FORMATS for mor info.\n\n 'output_template' (string) : Can be any output template supported \n by youtube-dl\n\n 'playlist_start' (int): Playlist index to start downloading\n\n 'playlist_end' (int): Playlist index to stop downloading.\n\n 'max_downloads' (int): Maximun number of video files to download\n from the given playlist.\n\n 'min_filesize' (float): Min file size of the video file.\n if the video is smaller than the given size then\n youtube-dl will abort the download process.\n\n 'max_filesize' (float): Min file size of the video file.\n if the video is larger than the given size then\n youtube-dl will abort the download process.\n\n 'min_filesize_unit' (string): Minimum file size unit.\n values are: '', k, m, g, y, p, e, z, y.\n\n 'max_filesize_unit' (string): Maximum file size unit.\n values are: '', k, m, g, y, p, e, z, y.\n\n 'write_subs' (boolean): If True youtube-dl will try to download \n the subtitles file for the given URL.\n\n 'write_all_subs' (boolean): If True youtube-dl will try to download\n all the available subtitles for the given URL.\n\n 'write_auto_subs' (boolean): If True youtube-dl will try to download \n the automatic subtitlees file for the given URL.\n\n 'embed_subs' (boolean): If True youtube-dl will try to merge the \n subtitles file with the video. (ONLY mp4 files)\n\n 'subs_lang' (string): Language of the subtitles file to download.\n Needs 'write_subs' option.\n\n 'ignore_errors' (boolean): If True youtube-dl will ignore the errors\n and continue the download process.\n\n 'open_dl_dir' (boolean): If True youtube-dl will open the destination\n folder after download process has been completed.\n\n 'write_description' (boolean): If True youtube-dl will the video\n description to a *.description file.\n\n 'write_info' (boolean): If True youtube-dl will write \n video metadata to a *.info.json file.\n\n 'write_thumbnail' (boolean): If True youtube-dl will write a \n thumbnail image to disk.\n\n 'retries' (int): Number of youtube-dl retries.\n\n 'user_agent' (string): Specify a custom user agent for youtube-dl\n\n 'referer' (string): Specify a custom referer to user if the video\n access is restricted to one domain.\n\n 'proxy' (string): Use the specified HTTP/HTTPS proxy.\n\n 'shutdown' (boolean): Shutdown PC after download process completed.\n\n 'sudo_password' (string): SUDO password for the shutdown process \n if the user does not have elevated privileges.\n\n 'username' (string): Username to login with.\n\n 'password' (string): Password to login with.\n\n 'video_password' (string): Video Password for the given URL. \n\n 'youtubedl_path' (string): Absolute the path to the youtube-dl binary.\n Default is the self.config_path. You can change this position \n to point anywhere if you want to use the youtube-dl binary on your system.\n This is also the directory where youtube-dlg will auto download the \n youtube-dl if not exists so you should make sure you have write access \n if you want to update the youtube-dl binary from within youtube-dlg.\n\n 'cmd_args' (string): String that contains extra youtube-dl options \n seperated by spaces.\n\n 'enable_log' (boolean): If True youtube-dlg will enable \n the LogManager, see main() function under __init__().\n\n 'log_time' (boolean): See logmanager.LogManager add_time attribute.\n\n 'workers_number' (int): Number of download workers that download manager \n will spawn. 
Must be greater than zero.\n\n 'locale_name' (string): Locale name (en_US)\n\n 'main_win_size' (tuple): Main window size (width x height).\n if window becomes to small the program will reset its size.\n see _settings_are_valid method MIN_FRAME_SIZE.\n\n 'opts_win_size' (tuple): Main window size (width x height).\n\n 'selected_video_formats' (list): List that contains the selected\n video formats to display on the main window\n\n 'selected_audio_formats' (list): List that contains the selected\n audio formats to display on the main window\n\n 'selected_format' (string): Current format selected on the main window\n\n 'youtube_dl_debug' (boolean): When True will pass '-v' flag to youtube-dl\n config file options.\n\n 'ignore_config' (boolean): When True will ignore youtube-dl config file option.\n\n 'confirm_exit' (boolean): When True create message to confirm exist youtube-dlg\n\n 'native_hls' (boolean): When True youtube-dl will use the natives HLS implementation.\n\n 'show_completion_popup' (boolean): When True youtube-dl-dlg will create message to inform \n the user for the download completion\n\n 'confirm_deletion' (boolean): When True ask user before item removal.\n\n 'nomtime' (boolean): When True will not use the last-modified header to \n set the file modification time.\n\n 'embed_thumbnail' (boolean): When True will embed the thumbnail in\n the audio file as cover art.\n\n 'add_metadata' (boolean): When True will write metadata to file. \n \"\"\"\n\n #+++++<DEBUG_LOG>\n logging.debug(\"load_options default___________________\")\n #-----<DEBUG_LOG>\n \n self.options = {\n 'save_path' : os.path.expanduser('~'),\n 'save_path_dirs': [\n os.path.expanduser('~'),\n os.path.join(os.path.expanduser('~'), \"Downloads\"),\n os.path.join(os.path.expanduser('~'), \"Desktop\"),\n os.path.join(os.path.expanduser('~'), \"Videos\"),\n os.path.join(os.path.expanduser('~'), \"Music\"), \n ],\n 'video_format': '0',\n 'second_video_format': '0',\n 'to_audio': False,\n 'keep_video': False,\n 'audio_format': '',\n 'audio_quality': '5',\n 'restrict_filenames': False,\n 'output_format': 1,\n 'output_template': os.path.join('%(uploader)s', '%(title)s.%(ext)s'),\n 'playlist_start': 1,\n 'playlist_end': 0,\n 'max_downloads': 0,\n 'min_filesize': 0,\n 'max_filesize': 0,\n 'min_filesize_unit': '',\n 'max_filesize_unit': '',\n 'write_subs': True,\n 'write_all_subs': False,\n 'write_auto_subs': False,\n 'embed_subs': False,\n 'subs_lang': 'en',\n 'ignore_errors': True,\n 'open_dl_dir': False,\n 'write_description': False,\n 'write_info': False,\n 'write_thumbnail': False,\n 'retries': 10,\n 'user_agent': '',\n 'referer': '',\n 'proxy': '',\n 'shutdown': False,\n 'sudo_password': '',\n 'username': '',\n 'password': '',\n 'video_password': '',\n 'youtubedl_path': self.config_path,\n 'cmd_args': '',\n 'enable_log': True,\n 'log_time': True,\n 'workers_number': 3,\n 'locale_name': 'en_US',\n 'main_win_size': (740, 490),\n 'opts_win_size': (640, 490),\n 'selected_video_formats': ['default', 'mp4', 'webm'],\n 'selected_audio_formats': ['mp3', 'm4a', 'vorbis'],\n 'selected_format': '0',\n 'youtube_dl_debug': False,\n 'ignore_config': True,\n 'confirm_exit': True,\n 'native_hls': True,\n 'show_completion_popup': True,\n 'confirm_deletion': True,\n 'nomtime': False,\n 'embed_thumbnail': False,\n 'add_metadata': False\n }\n\n def load_from_file(self):\n \"\"\"\n Load options from settings file.\n \"\"\"\n if not os.path.exists(self.settings_file):\n return\n \n with open(self.settings_file, 'rb') as settings_file:\n try:\n 
options = json.load(settings_file)\n \n if self._settings_coordinate(options):\n self.options = options\n except:\n self.load_default()\n\n def save_to_file(self):\n \"\"\"Save options to settings file.\n \"\"\"\n check_path(self.config_path)\n\n with open(self.settings_file, 'w') as settings_file:\n options = self._get_options()\n json.dump(options,\n \t settings_file,\n \t indent=4,\n \t separators=(',', ': '))\n \n def _settings_coordinate(self, settings_dict):\n \"\"\"\n Check settings.json dictionary\n\n Args: \n settings_dict: Options dict loaded. See load_from_file() method.\n \n Return:\n True if settings.json is valid / else False\n \"\"\"\n VALID_VIDEO_FORMAT = ('0', '17', '36', '5', '34', '35', '43', '44', '45',\n '46', '18', '22', '37', '38', '160', '133', '134', '135', '136','137',\n '264', '138', '242', '243', '244', '247', '248', '271', '272', '82',\n '83', '84', '85', '100', '101', '102', '139', '140', '141', '171', '172')\n\n VALID_AUDIO_FORMAT = ('mp3', 'wav', 'aac', 'm4a', 'vorbis', 'opus', '')\n\n VALID_AUDIO_QUALITY = ('0', '5', '9')\n\n VALID_FILESIZE_UNIT = ('', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y')\n\n VALID_SUB_LANGUAGE = ('en', 'el', 'pt', 'fr', 'it', 'ru', 'es', 'de', 'he', 'sv', 'tr')\n\n MIN_FRAME_SIZE = 100\n\n for key in self.options:\n if key not in settings_dict:\n return False\n\n if type(self.options[key]) != type(settings_dict[key]):\n return False\n\n # Check if each key has a valid value\n rules_dict = {\n 'video_format': FORMATS.keys(),\n 'second_video_format': VALID_VIDEO_FORMAT,\n 'audio_format': VALID_AUDIO_FORMAT,\n 'audio_quality': VALID_AUDIO_QUALITY,\n 'output_format': OUTPUT_FORMATS.keys(),\n 'min_filesize_unit': VALID_FILESIZE_UNIT,\n 'max_filesize_unit': VALID_FILESIZE_UNIT,\n 'subs_lang': VALID_SUB_LANGUAGE\n }\n\n for key, valid_list in rules_dict.items():\n if settings_dict[key] not in valid_list:\n return False\n\n if settings_dict['workers_number'] < 1:\n return False\n\n return True\n\n def _get_options(self):\n \"\"\"\n Return options dictionary.\n \"\"\"\n tmp_options = self.options.copy()\n\n for key in self.SENSITIVE_KEYS:\n tmp_options[key] = ''\n\n return tmp_options\n\n \n \n",
"step-ids": [
4,
6,
7,
9,
11
]
}
|
[
4,
6,
7,
9,
11
] |
import pygame
import random
from pygame.locals import *
class GameObject(pygame.sprite.Sprite):
SIZE = 8
def __init__(self, x, y, surface):
super(GameObject, self).__init__()
self.x = x
self.y = y
self.surface = surface
def getDistance(self, other):
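        # Manhattan (taxicab) distance between the two objects' positions.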
return abs(self.x-other.x) + abs(self.y - other.y)
def collide(self, main, other):
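        # Collision hook; the base class ignores collisions (subclasses override).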
pass
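# The Food class below originally lived in a separate module that imported
# the GameObject definition above via `import gameobject`.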
import gameobject
class Food(gameobject.GameObject):
def __init__(self, x, y, surface, time = random.randint(0, 50)):
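        # Note: the default for `time` is evaluated once, at class-definition
        # time, and the argument is currently unused.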
super(Food, self).__init__(x,y,surface)
self.dead = False
self.SIZE = gameobject.GameObject.SIZE
self.image = pygame.Surface((2*self.SIZE, 2*self.SIZE),
flags = SRCALPHA)
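        # Note: convert() returns a new Surface rather than converting in
        # place, so this bare call has no effect on self.image.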
self.image.convert()
self.rect = pygame.draw.circle(self.image,
pygame.Color("blue"),
                                       (self.SIZE, self.SIZE), self.SIZE // 2 + 2)  # integer radius under Python 3
self.rect.midtop = (x,y)
def update(self):
pass
# self.rect.midtop = (self.x, self.y)
def collide(self, main, other):
if not other == self and not self.dead:
self.dead = True
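# --- Usage sketch (added for illustration; not part of the original code,
# and it assumes a real `gameobject` module so the import above succeeds) ---
if __name__ == '__main__':
    pygame.init()
    screen = pygame.display.set_mode((320, 240))
    food = Food(160, 120, screen)        # drop one food pellet mid-screen
    sprites = pygame.sprite.Group(food)
    screen.fill(pygame.Color('white'))
    sprites.draw(screen)                 # blits food.image at food.rect
    pygame.display.flip()
    pygame.time.wait(500)
    pygame.quit()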
|
normal
|
{
"blob_id": "c589ce4ba2ae60d14787a8939146f6140fff1f01",
"index": 7914,
"step-1": "<mask token>\n\n\nclass GameObject(pygame.sprite.Sprite):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n\n\nclass Food(gameobject.GameObject):\n\n def __init__(self, x, y, surface, time=random.randint(0, 50)):\n super(Food, self).__init__(x, y, surface)\n self.dead = False\n self.SIZE = gameobject.GameObject.SIZE\n self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=\n SRCALPHA)\n self.image.convert()\n self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (\n self.SIZE, self.SIZE), self.SIZE / 2 + 2)\n self.rect.midtop = x, y\n\n def update(self):\n pass\n\n def collide(self, main, other):\n if not other == self and not self.dead:\n self.dead = True\n",
"step-2": "<mask token>\n\n\nclass GameObject(pygame.sprite.Sprite):\n <mask token>\n <mask token>\n\n def getDistance(self, other):\n return abs(self.x - other.x) + abs(self.y - other.y)\n\n def collide(self, main, other):\n pass\n\n\n<mask token>\n\n\nclass Food(gameobject.GameObject):\n\n def __init__(self, x, y, surface, time=random.randint(0, 50)):\n super(Food, self).__init__(x, y, surface)\n self.dead = False\n self.SIZE = gameobject.GameObject.SIZE\n self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=\n SRCALPHA)\n self.image.convert()\n self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (\n self.SIZE, self.SIZE), self.SIZE / 2 + 2)\n self.rect.midtop = x, y\n\n def update(self):\n pass\n\n def collide(self, main, other):\n if not other == self and not self.dead:\n self.dead = True\n",
"step-3": "<mask token>\n\n\nclass GameObject(pygame.sprite.Sprite):\n <mask token>\n\n def __init__(self, x, y, surface):\n super(GameObject, self).__init__()\n self.x = x\n self.y = y\n self.surface = surface\n\n def getDistance(self, other):\n return abs(self.x - other.x) + abs(self.y - other.y)\n\n def collide(self, main, other):\n pass\n\n\n<mask token>\n\n\nclass Food(gameobject.GameObject):\n\n def __init__(self, x, y, surface, time=random.randint(0, 50)):\n super(Food, self).__init__(x, y, surface)\n self.dead = False\n self.SIZE = gameobject.GameObject.SIZE\n self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=\n SRCALPHA)\n self.image.convert()\n self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (\n self.SIZE, self.SIZE), self.SIZE / 2 + 2)\n self.rect.midtop = x, y\n\n def update(self):\n pass\n\n def collide(self, main, other):\n if not other == self and not self.dead:\n self.dead = True\n",
"step-4": "import pygame\nimport random\nfrom pygame.locals import *\nimport pygame\nfrom pygame.locals import *\n\n\nclass GameObject(pygame.sprite.Sprite):\n SIZE = 8\n\n def __init__(self, x, y, surface):\n super(GameObject, self).__init__()\n self.x = x\n self.y = y\n self.surface = surface\n\n def getDistance(self, other):\n return abs(self.x - other.x) + abs(self.y - other.y)\n\n def collide(self, main, other):\n pass\n\n\nimport gameobject\n\n\nclass Food(gameobject.GameObject):\n\n def __init__(self, x, y, surface, time=random.randint(0, 50)):\n super(Food, self).__init__(x, y, surface)\n self.dead = False\n self.SIZE = gameobject.GameObject.SIZE\n self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=\n SRCALPHA)\n self.image.convert()\n self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (\n self.SIZE, self.SIZE), self.SIZE / 2 + 2)\n self.rect.midtop = x, y\n\n def update(self):\n pass\n\n def collide(self, main, other):\n if not other == self and not self.dead:\n self.dead = True\n",
"step-5": "import pygame\nimport random\n \nfrom pygame.locals import *\nimport pygame\n \nfrom pygame.locals import *\n \nclass GameObject(pygame.sprite.Sprite):\n SIZE = 8\n def __init__(self, x, y, surface):\n super(GameObject, self).__init__()\n self.x = x\n self.y = y\n self.surface = surface\n \n \n def getDistance(self, other):\n return abs(self.x-other.x) + abs(self.y - other.y)\n \n def collide(self, main, other): \n pass\nimport gameobject\n\n \nclass Food(gameobject.GameObject):\n \n def __init__(self, x, y, surface, time = random.randint(0, 50)):\n super(Food, self).__init__(x,y,surface)\n self.dead = False\n self.SIZE = gameobject.GameObject.SIZE\n self.image = pygame.Surface((2*self.SIZE, 2*self.SIZE),\n flags = SRCALPHA)\n self.image.convert()\n \n self.rect = pygame.draw.circle(self.image,\n pygame.Color(\"blue\"),\n (self.SIZE,self.SIZE), self.SIZE/2+2)\n \n \n self.rect.midtop = (x,y)\n \n def update(self):\n pass\n # self.rect.midtop = (self.x, self.y)\n \n def collide(self, main, other):\n if not other == self and not self.dead: \n self.dead = True\n",
"step-ids": [
5,
7,
8,
10,
11
]
}
|
[
5,
7,
8,
10,
11
] |
# from mini_imagenet_dataloader import MiniImageNetDataLoader
import os
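# Allow duplicate OpenMP runtimes (works around a common MKL/libiomp abort).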
os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
from tqdm import tqdm
import torch.nn.functional as F
from torchmeta.utils.gradient_based import gradient_update_parameters
from libs.models.maml_model import MetaConvModel
from libs.mini_objecta_dataLoader import FSDataLoader
def get_accuracy(logits, targets):
_, predictions = torch.max(logits, dim=-1)
return torch.mean(predictions.eq(targets).float())
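# e.g. get_accuracy(torch.tensor([[2., 0.], [0., 2.]]), torch.tensor([0, 1]))
# returns tensor(1.) because both argmax predictions match their targets.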
def ModelConvMiniImagenet(out_features, hidden_size=84):
return MetaConvModel(3, out_features, hidden_size=hidden_size,
feature_size=5 * 5 * hidden_size)
if __name__ == "__main__":
classes_num = 5
model = ModelConvMiniImagenet(classes_num)
model.load_state_dict(torch.load('trained parameters/maml_miniimagenet_5shot_5way.th'))
model.zero_grad()
dataloader = FSDataLoader()
meta_optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
accuracy_l = list()
loss = nn.CrossEntropyLoss()
model.train()
num_of_tasks = 100
epochs = 1
with tqdm(dataloader, total=num_of_tasks) as qbar:
for idx, batch in enumerate(qbar):
model.zero_grad()
train_inputs, train_targets = batch['Train']
test_inputs, test_targets = batch['Test']
for _ in range(epochs):
for task_idx, (train_input, train_target, test_input,
test_target) in enumerate(zip(train_inputs, train_targets,
test_inputs, test_targets)):
outer_loss = torch.tensor(0., device='cuda')
accuracy = torch.tensor(0., device='cuda')
train_logit = model(train_input)
inner_loss = F.cross_entropy(train_logit, train_target)
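                    # Inner-loop adaptation (MAML): a single gradient step on
                    # the support loss yields task-specific parameters.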
params = gradient_update_parameters(model, inner_loss)
test_logit = model(test_input , params=params)
outer_loss += F.cross_entropy(test_logit, test_target)
with torch.no_grad():
accuracy += get_accuracy(test_logit, test_target)
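                    # div_(1) is a no-op; a placeholder for averaging over a
                    # meta-batch larger than a single task.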
outer_loss.div_(1)
accuracy.div_(1)
outer_loss.backward()
meta_optimizer.step()
accuracy_l.append(accuracy.item())
if idx > num_of_tasks-1:
break
plt.title('MAML miniobjectnet training (100 tasks)')
plt.xlabel('Tasks (1 epoch)')
plt.ylabel('Accuracy')
plt.plot(accuracy_l)
plt.show()
print(sum(accuracy_l) / len(accuracy_l))
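# Device note (an assumption, not stated in the original): outer_loss and
# accuracy are created on 'cuda', but neither the model nor the batches are
# ever moved there explicitly, so the in-place adds above would raise a
# device-mismatch error on CPU tensors. A minimal sketch of the handling the
# loop implicitly relies on:
#   device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
#   model.to(device)
#   move each input/target tensor with tensor.to(device) before the forward passes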
|
normal
|
{
"blob_id": "e2a50fbd277ab868fbe71f9ff113a68a30b9f893",
"index": 2523,
"step-1": "<mask token>\n\n\ndef ModelConvMiniImagenet(out_features, hidden_size=84):\n return MetaConvModel(3, out_features, hidden_size=hidden_size,\n feature_size=5 * 5 * hidden_size)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_accuracy(logits, targets):\n _, predictions = torch.max(logits, dim=-1)\n return torch.mean(predictions.eq(targets).float())\n\n\ndef ModelConvMiniImagenet(out_features, hidden_size=84):\n return MetaConvModel(3, out_features, hidden_size=hidden_size,\n feature_size=5 * 5 * hidden_size)\n\n\n<mask token>\n",
"step-3": "<mask token>\nos.environ['KMP_DUPLICATE_LIB_OK'] = 'TRUE'\n<mask token>\n\n\ndef get_accuracy(logits, targets):\n _, predictions = torch.max(logits, dim=-1)\n return torch.mean(predictions.eq(targets).float())\n\n\ndef ModelConvMiniImagenet(out_features, hidden_size=84):\n return MetaConvModel(3, out_features, hidden_size=hidden_size,\n feature_size=5 * 5 * hidden_size)\n\n\nif __name__ == '__main__':\n classes_num = 5\n model = ModelConvMiniImagenet(classes_num)\n model.load_state_dict(torch.load(\n 'trained parameters/maml_miniimagenet_5shot_5way.th'))\n model.zero_grad()\n dataloader = FSDataLoader()\n meta_optimizer = torch.optim.Adam(model.parameters(), lr=0.001)\n accuracy_l = list()\n loss = nn.CrossEntropyLoss()\n model.train()\n num_of_tasks = 100\n epochs = 1\n with tqdm(dataloader, total=num_of_tasks) as qbar:\n for idx, batch in enumerate(qbar):\n model.zero_grad()\n train_inputs, train_targets = batch['Train']\n test_inputs, test_targets = batch['Test']\n for _ in range(epochs):\n for task_idx, (train_input, train_target, test_input,\n test_target) in enumerate(zip(train_inputs,\n train_targets, test_inputs, test_targets)):\n outer_loss = torch.tensor(0.0, device='cuda')\n accuracy = torch.tensor(0.0, device='cuda')\n train_logit = model(train_input)\n inner_loss = F.cross_entropy(train_logit, train_target)\n params = gradient_update_parameters(model, inner_loss)\n test_logit = model(test_input, params=params)\n outer_loss += F.cross_entropy(test_logit, test_target)\n with torch.no_grad():\n accuracy += get_accuracy(test_logit, test_target)\n outer_loss.div_(1)\n accuracy.div_(1)\n outer_loss.backward()\n meta_optimizer.step()\n accuracy_l.append(accuracy.item())\n if idx > num_of_tasks - 1:\n break\n plt.title('MAML miniobjectnet training (100 tasks)')\n plt.xlabel('Tasks (1 epoch)')\n plt.ylabel('Accuracy')\n plt.plot(accuracy_l)\n plt.show()\n print(sum(accuracy_l) / len(accuracy_l))\n",
"step-4": "import os\nos.environ['KMP_DUPLICATE_LIB_OK'] = 'TRUE'\nimport matplotlib.pyplot as plt\nimport torch\nimport torch.nn as nn\nfrom tqdm import tqdm\nimport torch.nn.functional as F\nfrom torchmeta.utils.gradient_based import gradient_update_parameters\nfrom libs.models.maml_model import MetaConvModel\nfrom libs.mini_objecta_dataLoader import FSDataLoader\n\n\ndef get_accuracy(logits, targets):\n _, predictions = torch.max(logits, dim=-1)\n return torch.mean(predictions.eq(targets).float())\n\n\ndef ModelConvMiniImagenet(out_features, hidden_size=84):\n return MetaConvModel(3, out_features, hidden_size=hidden_size,\n feature_size=5 * 5 * hidden_size)\n\n\nif __name__ == '__main__':\n classes_num = 5\n model = ModelConvMiniImagenet(classes_num)\n model.load_state_dict(torch.load(\n 'trained parameters/maml_miniimagenet_5shot_5way.th'))\n model.zero_grad()\n dataloader = FSDataLoader()\n meta_optimizer = torch.optim.Adam(model.parameters(), lr=0.001)\n accuracy_l = list()\n loss = nn.CrossEntropyLoss()\n model.train()\n num_of_tasks = 100\n epochs = 1\n with tqdm(dataloader, total=num_of_tasks) as qbar:\n for idx, batch in enumerate(qbar):\n model.zero_grad()\n train_inputs, train_targets = batch['Train']\n test_inputs, test_targets = batch['Test']\n for _ in range(epochs):\n for task_idx, (train_input, train_target, test_input,\n test_target) in enumerate(zip(train_inputs,\n train_targets, test_inputs, test_targets)):\n outer_loss = torch.tensor(0.0, device='cuda')\n accuracy = torch.tensor(0.0, device='cuda')\n train_logit = model(train_input)\n inner_loss = F.cross_entropy(train_logit, train_target)\n params = gradient_update_parameters(model, inner_loss)\n test_logit = model(test_input, params=params)\n outer_loss += F.cross_entropy(test_logit, test_target)\n with torch.no_grad():\n accuracy += get_accuracy(test_logit, test_target)\n outer_loss.div_(1)\n accuracy.div_(1)\n outer_loss.backward()\n meta_optimizer.step()\n accuracy_l.append(accuracy.item())\n if idx > num_of_tasks - 1:\n break\n plt.title('MAML miniobjectnet training (100 tasks)')\n plt.xlabel('Tasks (1 epoch)')\n plt.ylabel('Accuracy')\n plt.plot(accuracy_l)\n plt.show()\n print(sum(accuracy_l) / len(accuracy_l))\n",
"step-5": "# from mini_imagenet_dataloader import MiniImageNetDataLoader\nimport os\nos.environ[\"KMP_DUPLICATE_LIB_OK\"]=\"TRUE\"\nimport matplotlib.pyplot as plt\nimport torch\nimport torch.nn as nn\nfrom tqdm import tqdm\nimport torch.nn.functional as F\nfrom torchmeta.utils.gradient_based import gradient_update_parameters\nfrom libs.models.maml_model import MetaConvModel\nfrom libs.mini_objecta_dataLoader import FSDataLoader\n\ndef get_accuracy(logits, targets):\n _, predictions = torch.max(logits, dim=-1)\n return torch.mean(predictions.eq(targets).float())\n\ndef ModelConvMiniImagenet(out_features, hidden_size=84):\n return MetaConvModel(3, out_features, hidden_size=hidden_size,\n feature_size=5 * 5 * hidden_size)\n\nif __name__ == \"__main__\":\n classes_num = 5\n model = ModelConvMiniImagenet(classes_num)\n model.load_state_dict(torch.load('trained parameters/maml_miniimagenet_5shot_5way.th'))\n model.zero_grad()\n\n dataloader = FSDataLoader()\n\n meta_optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)\n accuracy_l = list()\n loss = nn.CrossEntropyLoss()\n model.train()\n num_of_tasks = 100\n epochs = 1\n with tqdm(dataloader, total=num_of_tasks) as qbar:\n for idx, batch in enumerate(qbar):\n model.zero_grad()\n train_inputs, train_targets = batch['Train']\n test_inputs, test_targets = batch['Test']\n \n for _ in range(epochs):\n for task_idx, (train_input, train_target, test_input,\n test_target) in enumerate(zip(train_inputs, train_targets,\n test_inputs, test_targets)):\n outer_loss = torch.tensor(0., device='cuda')\n accuracy = torch.tensor(0., device='cuda')\n train_logit = model(train_input)\n inner_loss = F.cross_entropy(train_logit, train_target)\n \n params = gradient_update_parameters(model, inner_loss)\n\n test_logit = model(test_input , params=params)\n outer_loss += F.cross_entropy(test_logit, test_target)\n\n with torch.no_grad():\n accuracy += get_accuracy(test_logit, test_target)\n outer_loss.div_(1)\n accuracy.div_(1)\n\n outer_loss.backward()\n meta_optimizer.step()\n accuracy_l.append(accuracy.item())\n if idx > num_of_tasks-1:\n break\n plt.title('MAML miniobjectnet training (100 tasks)')\n plt.xlabel('Tasks (1 epoch)')\n plt.ylabel('Accuracy')\n plt.plot(accuracy_l)\n plt.show()\n print(sum(accuracy_l) / len(accuracy_l))\n \n",
"step-ids": [
1,
2,
4,
5,
6
]
}
|
[
1,
2,
4,
5,
6
] |
# coding=utf-8
"""
author: wlc
function: encyclopedia retrieval data layer
"""
# External libraries
import json
import re
from bs4 import BeautifulSoup
# Internal libraries
from src.util.reptile import *
class EncyclopediaDao:
@staticmethod
	def get_key_content(key: str) -> list:
		"""
		Fetch encyclopedia search result content for the given keyword.
:param key:
:return:
"""
		# 1. Build request parameters
url = 'https://zh.wikipedia.org/w/api.php?'
parm = {
'action': 'query',
'list': 'search',
'srsearch': key,
'format': 'json',
'formatversion': '2'
}
		# 2. Fetch encyclopedia content
reptile = Reptile()
page_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3)
content_list = json.loads(page_content)['query']['search']
		# 3. Format the results
data = []
prefix = 'https://zh.wikipedia.org/wiki/'
for index, item in enumerate(content_list):
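			# MediaWiki timestamps are ISO 8601, e.g. "2020-01-02T03:04:05Z".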
date, time = item['timestamp'].rstrip('Z').split('T')
entry = {
'id': item['pageid'],
'index': index,
'create_date': date,
'create_time': time,
'title': item['title'],
				'abstract': re.sub(r'</?span[^>]*>', '', item['snippet']),  # strip the <span class="searchmatch"> tags
'url': prefix + item['title'],
}
data.append(entry)
return data
@staticmethod
def get_key_title(key: str) -> list:
"""
		Fetch encyclopedia search result titles for the given keyword.
:param key:
:return:
"""
		# 1. Build request parameters
url = 'https://zh.wikipedia.org/w/api.php?'
parm = {
'action': 'opensearch',
'search': key,
'format': 'json',
'formatversion': '2'
}
		# 2. Fetch encyclopedia content
reptile = Reptile()
page_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3)
content_list = json.loads(page_content)[1]
		# 3. Format the results
data = []
prefix = 'https://zh.wikipedia.org/wiki/'
for index, item in enumerate(content_list):
entry = {
'index': index,
'title': item,
'url': prefix + item,
}
data.append(entry)
return data
@staticmethod
def get_faq_content(query: str, page: str) -> list:
"""
		Fetch FAQ search results for the given query.
:param query:
:param page:
:return:
"""
		# 1. Build request parameters
url = 'https://zhidao.baidu.com/search?'
parm = {
'lm': '0',
'rn': '5',
'pn': page,
'fr': 'search',
'ie': 'gbk',
'word': query
}
		# 2. Fetch page content
reptile = Reptile()
page_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3, is_cookie=True, charset='gbk')
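		# Baidu Zhidao serves GBK-encoded pages, hence ie=gbk and charset='gbk'.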
bs = BeautifulSoup(page_content, "html.parser")
content_list = bs.body.find_all("dl", {'class': 'dl'})
		# 3. Format the results
data = []
for item in content_list:
entry = {
'create_date': item.find("dd", {'class': 'dd explain f-light'}).span.text,
'title': item.a.text,
'abstract': item.find("dd", {'class': 'dd answer'}).text,
'url': item.a.get('href')
}
data.append(entry)
return data
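
# Usage sketch (illustrative only, not part of the original module; it needs
# network access and the internal Reptile helper to be importable):
if __name__ == '__main__':
	for entry in EncyclopediaDao.get_key_title('Python'):
		print(entry['index'], entry['title'], entry['url'])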
|
normal
|
{
"blob_id": "a7f348b258e1d6b02a79c60e4fe54b6d53801f70",
"index": 3877,
"step-1": "<mask token>\n\n\nclass EncyclopediaDao:\n <mask token>\n <mask token>\n\n @staticmethod\n def get_faq_content(query: str, page: str) ->list:\n \"\"\"\n\t\t获取指定query的faq检索内容\n\t\t:param query:\n\t\t:param page:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zhidao.baidu.com/search?'\n parm = {'lm': '0', 'rn': '5', 'pn': page, 'fr': 'search', 'ie':\n 'gbk', 'word': query}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3, is_cookie=True,\n charset='gbk')\n bs = BeautifulSoup(page_content, 'html.parser')\n content_list = bs.body.find_all('dl', {'class': 'dl'})\n data = []\n for item in content_list:\n entry = {'create_date': item.find('dd', {'class':\n 'dd explain f-light'}).span.text, 'title': item.a.text,\n 'abstract': item.find('dd', {'class': 'dd answer'}).text,\n 'url': item.a.get('href')}\n data.append(entry)\n return data\n",
"step-2": "<mask token>\n\n\nclass EncyclopediaDao:\n <mask token>\n\n @staticmethod\n def get_key_title(key: str) ->list:\n \"\"\"\n\t\t获取指定关键字的百科内容检索标题\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zh.wikipedia.org/w/api.php?'\n parm = {'action': 'opensearch', 'search': key, 'format': 'json',\n 'formatversion': '2'}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3)\n content_list = json.loads(page_content)[1]\n data = []\n prefix = 'https://zh.wikipedia.org/wiki/'\n for index, item in enumerate(content_list):\n entry = {'index': index, 'title': item, 'url': prefix + item}\n data.append(entry)\n return data\n\n @staticmethod\n def get_faq_content(query: str, page: str) ->list:\n \"\"\"\n\t\t获取指定query的faq检索内容\n\t\t:param query:\n\t\t:param page:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zhidao.baidu.com/search?'\n parm = {'lm': '0', 'rn': '5', 'pn': page, 'fr': 'search', 'ie':\n 'gbk', 'word': query}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3, is_cookie=True,\n charset='gbk')\n bs = BeautifulSoup(page_content, 'html.parser')\n content_list = bs.body.find_all('dl', {'class': 'dl'})\n data = []\n for item in content_list:\n entry = {'create_date': item.find('dd', {'class':\n 'dd explain f-light'}).span.text, 'title': item.a.text,\n 'abstract': item.find('dd', {'class': 'dd answer'}).text,\n 'url': item.a.get('href')}\n data.append(entry)\n return data\n",
"step-3": "<mask token>\n\n\nclass EncyclopediaDao:\n\n @staticmethod\n def get_key_content(key: str) ->list:\n \"\"\"\n\t\t获取指定关键字的百科内容检索内容\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zh.wikipedia.org/w/api.php?'\n parm = {'action': 'query', 'list': 'search', 'srsearch': key,\n 'format': 'json', 'formatversion': '2'}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3)\n content_list = json.loads(page_content)['query']['search']\n data = []\n prefix = 'https://zh.wikipedia.org/wiki/'\n for index, item in enumerate(content_list):\n date, time = item['timestamp'].rstrip('Z').split('T')\n entry = {'id': item['pageid'], 'index': index, 'create_date':\n date, 'create_time': time, 'title': item['title'],\n 'abstract': re.sub('[<span class=\"searchmatch\">,</span>]',\n '', item['snippet']), 'url': prefix + item['title']}\n data.append(entry)\n return data\n\n @staticmethod\n def get_key_title(key: str) ->list:\n \"\"\"\n\t\t获取指定关键字的百科内容检索标题\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zh.wikipedia.org/w/api.php?'\n parm = {'action': 'opensearch', 'search': key, 'format': 'json',\n 'formatversion': '2'}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3)\n content_list = json.loads(page_content)[1]\n data = []\n prefix = 'https://zh.wikipedia.org/wiki/'\n for index, item in enumerate(content_list):\n entry = {'index': index, 'title': item, 'url': prefix + item}\n data.append(entry)\n return data\n\n @staticmethod\n def get_faq_content(query: str, page: str) ->list:\n \"\"\"\n\t\t获取指定query的faq检索内容\n\t\t:param query:\n\t\t:param page:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zhidao.baidu.com/search?'\n parm = {'lm': '0', 'rn': '5', 'pn': page, 'fr': 'search', 'ie':\n 'gbk', 'word': query}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3, is_cookie=True,\n charset='gbk')\n bs = BeautifulSoup(page_content, 'html.parser')\n content_list = bs.body.find_all('dl', {'class': 'dl'})\n data = []\n for item in content_list:\n entry = {'create_date': item.find('dd', {'class':\n 'dd explain f-light'}).span.text, 'title': item.a.text,\n 'abstract': item.find('dd', {'class': 'dd answer'}).text,\n 'url': item.a.get('href')}\n data.append(entry)\n return data\n",
"step-4": "<mask token>\nimport json\nimport re\nfrom bs4 import BeautifulSoup\nfrom src.util.reptile import *\n\n\nclass EncyclopediaDao:\n\n @staticmethod\n def get_key_content(key: str) ->list:\n \"\"\"\n\t\t获取指定关键字的百科内容检索内容\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zh.wikipedia.org/w/api.php?'\n parm = {'action': 'query', 'list': 'search', 'srsearch': key,\n 'format': 'json', 'formatversion': '2'}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3)\n content_list = json.loads(page_content)['query']['search']\n data = []\n prefix = 'https://zh.wikipedia.org/wiki/'\n for index, item in enumerate(content_list):\n date, time = item['timestamp'].rstrip('Z').split('T')\n entry = {'id': item['pageid'], 'index': index, 'create_date':\n date, 'create_time': time, 'title': item['title'],\n 'abstract': re.sub('[<span class=\"searchmatch\">,</span>]',\n '', item['snippet']), 'url': prefix + item['title']}\n data.append(entry)\n return data\n\n @staticmethod\n def get_key_title(key: str) ->list:\n \"\"\"\n\t\t获取指定关键字的百科内容检索标题\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zh.wikipedia.org/w/api.php?'\n parm = {'action': 'opensearch', 'search': key, 'format': 'json',\n 'formatversion': '2'}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3)\n content_list = json.loads(page_content)[1]\n data = []\n prefix = 'https://zh.wikipedia.org/wiki/'\n for index, item in enumerate(content_list):\n entry = {'index': index, 'title': item, 'url': prefix + item}\n data.append(entry)\n return data\n\n @staticmethod\n def get_faq_content(query: str, page: str) ->list:\n \"\"\"\n\t\t获取指定query的faq检索内容\n\t\t:param query:\n\t\t:param page:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zhidao.baidu.com/search?'\n parm = {'lm': '0', 'rn': '5', 'pn': page, 'fr': 'search', 'ie':\n 'gbk', 'word': query}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3, is_cookie=True,\n charset='gbk')\n bs = BeautifulSoup(page_content, 'html.parser')\n content_list = bs.body.find_all('dl', {'class': 'dl'})\n data = []\n for item in content_list:\n entry = {'create_date': item.find('dd', {'class':\n 'dd explain f-light'}).span.text, 'title': item.a.text,\n 'abstract': item.find('dd', {'class': 'dd answer'}).text,\n 'url': item.a.get('href')}\n data.append(entry)\n return data\n",
"step-5": "# coding=utf-8\n\n\"\"\"\nauthor: wlc\nfunction: 百科检索数据层\n\"\"\"\n\n# 引入外部库\nimport json\nimport re\nfrom bs4 import BeautifulSoup\n\n# 引入内部库\nfrom src.util.reptile import *\n\n\nclass EncyclopediaDao:\n\t@staticmethod\n\tdef get_key_content (key: str) -> list:\n\t\t\"\"\"\n\t\t获取指定关键字的百科内容检索内容\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n\t\t# 1.参数设置\n\t\turl = 'https://zh.wikipedia.org/w/api.php?'\n\t\tparm = {\n\t\t\t'action': 'query',\n\t\t\t'list': 'search',\n\t\t\t'srsearch': key,\n\t\t\t'format': 'json',\n\t\t\t'formatversion': '2'\n\t\t}\n\n\t\t# 2.百科内容获取\n\t\treptile = Reptile()\n\t\tpage_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3)\n\t\tcontent_list = json.loads(page_content)['query']['search']\n\n\t\t# 3.百科内容格式化\n\t\tdata = []\n\t\tprefix = 'https://zh.wikipedia.org/wiki/'\n\t\tfor index, item in enumerate(content_list):\n\t\t\tdate, time = item['timestamp'].rstrip('Z').split('T')\n\t\t\tentry = {\n\t\t\t\t'id': item['pageid'],\n\t\t\t\t'index': index,\n\t\t\t\t'create_date': date,\n\t\t\t\t'create_time': time,\n\t\t\t\t'title': item['title'],\n\t\t\t\t'abstract': re.sub('[<span class=\\\"searchmatch\\\">,</span>]', '', item['snippet']),\n\t\t\t\t'url': prefix + item['title'],\n\t\t\t}\n\t\t\tdata.append(entry)\n\n\t\treturn data\n\n\t@staticmethod\n\tdef get_key_title(key: str) -> list:\n\t\t\"\"\"\n\t\t获取指定关键字的百科内容检索标题\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n\t\t# 1.参数设置\n\t\turl = 'https://zh.wikipedia.org/w/api.php?'\n\t\tparm = {\n\t\t\t'action': 'opensearch',\n\t\t\t'search': key,\n\t\t\t'format': 'json',\n\t\t\t'formatversion': '2'\n\t\t}\n\n\t\t# 2.百科内容获取\n\t\treptile = Reptile()\n\t\tpage_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3)\n\t\tcontent_list = json.loads(page_content)[1]\n\n\t\t# 3.百科内容格式化\n\t\tdata = []\n\t\tprefix = 'https://zh.wikipedia.org/wiki/'\n\t\tfor index, item in enumerate(content_list):\n\t\t\tentry = {\n\t\t\t\t'index': index,\n\t\t\t\t'title': item,\n\t\t\t\t'url': prefix + item,\n\t\t\t}\n\t\t\tdata.append(entry)\n\n\t\treturn data\n\n\t@staticmethod\n\tdef get_faq_content(query: str, page: str) -> list:\n\t\t\"\"\"\n\t\t获取指定query的faq检索内容\n\t\t:param query:\n\t\t:param page:\n\t\t:return:\n\t\t\"\"\"\n\t\t# 1.参数设置\n\t\turl = 'https://zhidao.baidu.com/search?'\n\t\tparm = {\n\t\t\t'lm': '0',\n\t\t\t'rn': '5',\n\t\t\t'pn': page,\n\t\t\t'fr': 'search',\n\t\t\t'ie': 'gbk',\n\t\t\t'word': query\n\t\t}\n\n\t\t# 2.百科内容获取\n\t\treptile = Reptile()\n\t\tpage_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3, is_cookie=True, charset='gbk')\n\t\tbs = BeautifulSoup(page_content, \"html.parser\")\n\t\tcontent_list = bs.body.find_all(\"dl\", {'class': 'dl'})\n\n\t\t# 3.百科内容格式化\n\t\tdata = []\n\t\tfor item in content_list:\n\t\t\tentry = {\n\t\t\t\t'create_date': item.find(\"dd\", {'class': 'dd explain f-light'}).span.text,\n\t\t\t\t'title': item.a.text,\n\t\t\t\t'abstract': item.find(\"dd\", {'class': 'dd answer'}).text,\n\t\t\t\t'url': item.a.get('href')\n\t\t\t}\n\t\t\tdata.append(entry)\n\n\t\treturn data\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
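A minimal usage sketch for the EncyclopediaDao class in the step-5 source above. Everything beyond the record itself is an assumption: the module path encyclopedia_dao and the sample queries are hypothetical, and the Reptile helper from src.util.reptile is taken on faith to return the response body as text (live network access required).

# usage_sketch.py — hypothetical driver for the EncyclopediaDao record above
from encyclopedia_dao import EncyclopediaDao  # assumed module name; adjust to the real path

if __name__ == '__main__':
    # Wikipedia full-text search: entries carry id, create_date/create_time, title, abstract, url
    for entry in EncyclopediaDao.get_key_content('机器学习'):
        print(entry['create_date'], entry['title'], entry['url'])

    # Wikipedia opensearch: title suggestions only
    for entry in EncyclopediaDao.get_key_title('机器学习'):
        print(entry['index'], entry['title'])

    # Baidu Zhidao FAQ search: page is a string offset, '0' requests the first page of 5 results
    for entry in EncyclopediaDao.get_faq_content('什么是机器学习', '0'):
        print(entry['create_date'], entry['title'], entry['url'])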